import numpy as np
import pandas as pd
import statistics as stat
import matplotlib.pyplot as plt
import seaborn as sns
import warnings
import os
warnings.filterwarnings("ignore")
data=pd.read_csv('/Users/ankitmehra/Downloads/datasets/data.csv')
data
| Unnamed: 0 | ID | Name | Age | Photo | Nationality | Flag | Overall | Potential | Club | ... | Composure | Marking | StandingTackle | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 158023 | L. Messi | 31 | https://cdn.sofifa.org/players/4/19/158023.png | Argentina | https://cdn.sofifa.org/flags/52.png | 94 | 94 | FC Barcelona | ... | 96.0 | 33.0 | 28.0 | 26.0 | 6.0 | 11.0 | 15.0 | 14.0 | 8.0 | €226.5M |
| 1 | 1 | 20801 | Cristiano Ronaldo | 33 | https://cdn.sofifa.org/players/4/19/20801.png | Portugal | https://cdn.sofifa.org/flags/38.png | 94 | 94 | Juventus | ... | 95.0 | 28.0 | 31.0 | 23.0 | 7.0 | 11.0 | 15.0 | 14.0 | 11.0 | €127.1M |
| 2 | 2 | 190871 | Neymar Jr | 26 | https://cdn.sofifa.org/players/4/19/190871.png | Brazil | https://cdn.sofifa.org/flags/54.png | 92 | 93 | Paris Saint-Germain | ... | 94.0 | 27.0 | 24.0 | 33.0 | 9.0 | 9.0 | 15.0 | 15.0 | 11.0 | €228.1M |
| 3 | 3 | 193080 | De Gea | 27 | https://cdn.sofifa.org/players/4/19/193080.png | Spain | https://cdn.sofifa.org/flags/45.png | 91 | 93 | Manchester United | ... | 68.0 | 15.0 | 21.0 | 13.0 | 90.0 | 85.0 | 87.0 | 88.0 | 94.0 | €138.6M |
| 4 | 4 | 192985 | K. De Bruyne | 27 | https://cdn.sofifa.org/players/4/19/192985.png | Belgium | https://cdn.sofifa.org/flags/7.png | 91 | 92 | Manchester City | ... | 88.0 | 68.0 | 58.0 | 51.0 | 15.0 | 13.0 | 5.0 | 10.0 | 13.0 | €196.4M |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 18202 | 18202 | 238813 | J. Lundstram | 19 | https://cdn.sofifa.org/players/4/19/238813.png | England | https://cdn.sofifa.org/flags/14.png | 47 | 65 | Crewe Alexandra | ... | 45.0 | 40.0 | 48.0 | 47.0 | 10.0 | 13.0 | 7.0 | 8.0 | 9.0 | €143K |
| 18203 | 18203 | 243165 | N. Christoffersson | 19 | https://cdn.sofifa.org/players/4/19/243165.png | Sweden | https://cdn.sofifa.org/flags/46.png | 47 | 63 | Trelleborgs FF | ... | 42.0 | 22.0 | 15.0 | 19.0 | 10.0 | 9.0 | 9.0 | 5.0 | 12.0 | €113K |
| 18204 | 18204 | 241638 | B. Worman | 16 | https://cdn.sofifa.org/players/4/19/241638.png | England | https://cdn.sofifa.org/flags/14.png | 47 | 67 | Cambridge United | ... | 41.0 | 32.0 | 13.0 | 11.0 | 6.0 | 5.0 | 10.0 | 6.0 | 13.0 | €165K |
| 18205 | 18205 | 246268 | D. Walker-Rice | 17 | https://cdn.sofifa.org/players/4/19/246268.png | England | https://cdn.sofifa.org/flags/14.png | 47 | 66 | Tranmere Rovers | ... | 46.0 | 20.0 | 25.0 | 27.0 | 14.0 | 6.0 | 14.0 | 8.0 | 9.0 | €143K |
| 18206 | 18206 | 246269 | G. Nugent | 16 | https://cdn.sofifa.org/players/4/19/246269.png | England | https://cdn.sofifa.org/flags/14.png | 46 | 66 | Tranmere Rovers | ... | 43.0 | 40.0 | 43.0 | 50.0 | 10.0 | 15.0 | 9.0 | 12.0 | 9.0 | €165K |
18207 rows × 89 columns
data.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 18207 entries, 0 to 18206 Data columns (total 89 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Unnamed: 0 18207 non-null int64 1 ID 18207 non-null int64 2 Name 18207 non-null object 3 Age 18207 non-null int64 4 Photo 18207 non-null object 5 Nationality 18207 non-null object 6 Flag 18207 non-null object 7 Overall 18207 non-null int64 8 Potential 18207 non-null int64 9 Club 17966 non-null object 10 Club Logo 18207 non-null object 11 Value 18207 non-null object 12 Wage 18207 non-null object 13 Special 18207 non-null int64 14 Preferred Foot 18159 non-null object 15 International Reputation 18159 non-null float64 16 Weak Foot 18159 non-null float64 17 Skill Moves 18159 non-null float64 18 Work Rate 18159 non-null object 19 Body Type 18159 non-null object 20 Real Face 18159 non-null object 21 Position 18147 non-null object 22 Jersey Number 18147 non-null float64 23 Joined 16654 non-null object 24 Loaned From 1264 non-null object 25 Contract Valid Until 17918 non-null object 26 Height 18159 non-null object 27 Weight 18159 non-null object 28 LS 16122 non-null object 29 ST 16122 non-null object 30 RS 16122 non-null object 31 LW 16122 non-null object 32 LF 16122 non-null object 33 CF 16122 non-null object 34 RF 16122 non-null object 35 RW 16122 non-null object 36 LAM 16122 non-null object 37 CAM 16122 non-null object 38 RAM 16122 non-null object 39 LM 16122 non-null object 40 LCM 16122 non-null object 41 CM 16122 non-null object 42 RCM 16122 non-null object 43 RM 16122 non-null object 44 LWB 16122 non-null object 45 LDM 16122 non-null object 46 CDM 16122 non-null object 47 RDM 16122 non-null object 48 RWB 16122 non-null object 49 LB 16122 non-null object 50 LCB 16122 non-null object 51 CB 16122 non-null object 52 RCB 16122 non-null object 53 RB 16122 non-null object 54 Crossing 18159 non-null float64 55 Finishing 18159 non-null float64 56 HeadingAccuracy 18159 non-null float64 57 ShortPassing 18159 non-null float64 58 Volleys 18159 non-null float64 59 Dribbling 18159 non-null float64 60 Curve 18159 non-null float64 61 FKAccuracy 18159 non-null float64 62 LongPassing 18159 non-null float64 63 BallControl 18159 non-null float64 64 Acceleration 18159 non-null float64 65 SprintSpeed 18159 non-null float64 66 Agility 18159 non-null float64 67 Reactions 18159 non-null float64 68 Balance 18159 non-null float64 69 ShotPower 18159 non-null float64 70 Jumping 18159 non-null float64 71 Stamina 18159 non-null float64 72 Strength 18159 non-null float64 73 LongShots 18159 non-null float64 74 Aggression 18159 non-null float64 75 Interceptions 18159 non-null float64 76 Positioning 18159 non-null float64 77 Vision 18159 non-null float64 78 Penalties 18159 non-null float64 79 Composure 18159 non-null float64 80 Marking 18159 non-null float64 81 StandingTackle 18159 non-null float64 82 SlidingTackle 18159 non-null float64 83 GKDiving 18159 non-null float64 84 GKHandling 18159 non-null float64 85 GKKicking 18159 non-null float64 86 GKPositioning 18159 non-null float64 87 GKReflexes 18159 non-null float64 88 Release Clause 16643 non-null object dtypes: float64(38), int64(6), object(45) memory usage: 12.4+ MB
# renaming columns
data.rename(columns = {'Unnamed: 0':'Duplicate'}, inplace = True)
# dropping duplicate columns
data.drop(['Photo','Flag','Club Logo','Duplicate'],axis=1,inplace=True)
data.shape
(18207, 85)
# data manipulation - columns conversion
data[['Value','Wage','Release Clause']]
| Value | Wage | Release Clause | |
|---|---|---|---|
| 0 | €110.5M | €565K | €226.5M |
| 1 | €77M | €405K | €127.1M |
| 2 | €118.5M | €290K | €228.1M |
| 3 | €72M | €260K | €138.6M |
| 4 | €102M | €355K | €196.4M |
| ... | ... | ... | ... |
| 18202 | €60K | €1K | €143K |
| 18203 | €60K | €1K | €113K |
| 18204 | €60K | €1K | €165K |
| 18205 | €60K | €1K | €143K |
| 18206 | €60K | €1K | €165K |
18207 rows × 3 columns
def Value_float(Value):
if isinstance(Value,str):
out = Value.replace('€', '')
if 'M' in out:
out = float(out.replace('M', ''))*1000000
elif 'K' in Value:
out = float(out.replace('K', ''))*1000
return float(out)
data['Value'] = data['Value'].apply(Value_float)
data['Wage'] = data['Wage'].apply(Value_float)
data['Release Clause'] = data['Release Clause'].apply(Value_float)
data[['Value','Wage','Release Clause']]
| Value | Wage | Release Clause | |
|---|---|---|---|
| 0 | 110500000.0 | 565000.0 | 226500000.0 |
| 1 | 77000000.0 | 405000.0 | 127100000.0 |
| 2 | 118500000.0 | 290000.0 | 228100000.0 |
| 3 | 72000000.0 | 260000.0 | 138600000.0 |
| 4 | 102000000.0 | 355000.0 | 196400000.0 |
| ... | ... | ... | ... |
| 18202 | 60000.0 | 1000.0 | 143000.0 |
| 18203 | 60000.0 | 1000.0 | 113000.0 |
| 18204 | 60000.0 | 1000.0 | 165000.0 |
| 18205 | 60000.0 | 1000.0 | 143000.0 |
| 18206 | 60000.0 | 1000.0 | 165000.0 |
18207 rows × 3 columns
# target column - release clause class
def income(x):
if x<1000000:
return 'low_income_class'
elif x>=1000000:
return 'high_income_class'
data["Release_Clause_Class"] = data["Release Clause"].apply(income)
data
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | Marking | StandingTackle | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 158023 | L. Messi | 31 | Argentina | 94 | 94 | FC Barcelona | 110500000.0 | 565000.0 | 2202 | ... | 33.0 | 28.0 | 26.0 | 6.0 | 11.0 | 15.0 | 14.0 | 8.0 | 226500000.0 | high_income_class |
| 1 | 20801 | Cristiano Ronaldo | 33 | Portugal | 94 | 94 | Juventus | 77000000.0 | 405000.0 | 2228 | ... | 28.0 | 31.0 | 23.0 | 7.0 | 11.0 | 15.0 | 14.0 | 11.0 | 127100000.0 | high_income_class |
| 2 | 190871 | Neymar Jr | 26 | Brazil | 92 | 93 | Paris Saint-Germain | 118500000.0 | 290000.0 | 2143 | ... | 27.0 | 24.0 | 33.0 | 9.0 | 9.0 | 15.0 | 15.0 | 11.0 | 228100000.0 | high_income_class |
| 3 | 193080 | De Gea | 27 | Spain | 91 | 93 | Manchester United | 72000000.0 | 260000.0 | 1471 | ... | 15.0 | 21.0 | 13.0 | 90.0 | 85.0 | 87.0 | 88.0 | 94.0 | 138600000.0 | high_income_class |
| 4 | 192985 | K. De Bruyne | 27 | Belgium | 91 | 92 | Manchester City | 102000000.0 | 355000.0 | 2281 | ... | 68.0 | 58.0 | 51.0 | 15.0 | 13.0 | 5.0 | 10.0 | 13.0 | 196400000.0 | high_income_class |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 18202 | 238813 | J. Lundstram | 19 | England | 47 | 65 | Crewe Alexandra | 60000.0 | 1000.0 | 1307 | ... | 40.0 | 48.0 | 47.0 | 10.0 | 13.0 | 7.0 | 8.0 | 9.0 | 143000.0 | low_income_class |
| 18203 | 243165 | N. Christoffersson | 19 | Sweden | 47 | 63 | Trelleborgs FF | 60000.0 | 1000.0 | 1098 | ... | 22.0 | 15.0 | 19.0 | 10.0 | 9.0 | 9.0 | 5.0 | 12.0 | 113000.0 | low_income_class |
| 18204 | 241638 | B. Worman | 16 | England | 47 | 67 | Cambridge United | 60000.0 | 1000.0 | 1189 | ... | 32.0 | 13.0 | 11.0 | 6.0 | 5.0 | 10.0 | 6.0 | 13.0 | 165000.0 | low_income_class |
| 18205 | 246268 | D. Walker-Rice | 17 | England | 47 | 66 | Tranmere Rovers | 60000.0 | 1000.0 | 1228 | ... | 20.0 | 25.0 | 27.0 | 14.0 | 6.0 | 14.0 | 8.0 | 9.0 | 143000.0 | low_income_class |
| 18206 | 246269 | G. Nugent | 16 | England | 46 | 66 | Tranmere Rovers | 60000.0 | 1000.0 | 1321 | ... | 40.0 | 43.0 | 50.0 | 10.0 | 15.0 | 9.0 | 12.0 | 9.0 | 165000.0 | low_income_class |
18207 rows × 86 columns
data["Release Clause"].min()
13000.0
data["Release Clause"].max()
228100000.0
data["Release_Clause_Class"].value_counts()
high_income_class 9150 low_income_class 7493 Name: Release_Clause_Class, dtype: int64
sns.countplot(x="Release_Clause_Class",data=data)
plt.show()
data["Release_Clause_Class"] = data["Release_Clause_Class"].map({"low_income_class":0,
"high_income_class":1})
data.head()
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | Marking | StandingTackle | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 158023 | L. Messi | 31 | Argentina | 94 | 94 | FC Barcelona | 110500000.0 | 565000.0 | 2202 | ... | 33.0 | 28.0 | 26.0 | 6.0 | 11.0 | 15.0 | 14.0 | 8.0 | 226500000.0 | 1.0 |
| 1 | 20801 | Cristiano Ronaldo | 33 | Portugal | 94 | 94 | Juventus | 77000000.0 | 405000.0 | 2228 | ... | 28.0 | 31.0 | 23.0 | 7.0 | 11.0 | 15.0 | 14.0 | 11.0 | 127100000.0 | 1.0 |
| 2 | 190871 | Neymar Jr | 26 | Brazil | 92 | 93 | Paris Saint-Germain | 118500000.0 | 290000.0 | 2143 | ... | 27.0 | 24.0 | 33.0 | 9.0 | 9.0 | 15.0 | 15.0 | 11.0 | 228100000.0 | 1.0 |
| 3 | 193080 | De Gea | 27 | Spain | 91 | 93 | Manchester United | 72000000.0 | 260000.0 | 1471 | ... | 15.0 | 21.0 | 13.0 | 90.0 | 85.0 | 87.0 | 88.0 | 94.0 | 138600000.0 | 1.0 |
| 4 | 192985 | K. De Bruyne | 27 | Belgium | 91 | 92 | Manchester City | 102000000.0 | 355000.0 | 2281 | ... | 68.0 | 58.0 | 51.0 | 15.0 | 13.0 | 5.0 | 10.0 | 13.0 | 196400000.0 | 1.0 |
5 rows × 86 columns
# converting columns
data['Joined'].isna().sum()
1553
data['Joined']=data['Joined'].fillna('0')
data['Joined'].isna().sum()
0
data['Joined'].head()
0 Jul 1, 2004 1 Jul 10, 2018 2 Aug 3, 2017 3 Jul 1, 2011 4 Aug 30, 2015 Name: Joined, dtype: object
def str4(x):
if isinstance(x,str):
return int(x[-4:])
data['Joined'] = data['Joined'].apply(str4)
data['Joined'].head()
0 2004 1 2018 2 2017 3 2011 4 2015 Name: Joined, dtype: int64
data['Contract Valid Until'].head()
0 2021 1 2022 2 2022 3 2020 4 2023 Name: Contract Valid Until, dtype: object
data['Contract Valid Until']= pd.to_datetime(data['Contract Valid Until'])
data['Contract Valid Until'].head()
0 2021-01-01 1 2022-01-01 2 2022-01-01 3 2020-01-01 4 2023-01-01 Name: Contract Valid Until, dtype: datetime64[ns]
data['Height'].head()
0 5'7 1 6'2 2 5'9 3 6'4 4 5'11 Name: Height, dtype: object
def feet(x):
l=[]
if isinstance(x,str):
l = x.split("'")
i = int(l[0])
j = int(l[1])
f = ((i*12)+j)/12
return f
data['Height'] = data['Height'].apply(feet)
data['Height'].head()
0 5.583333 1 6.166667 2 5.750000 3 6.333333 4 5.916667 Name: Height, dtype: float64
data['Weight'].head()
0 159lbs 1 183lbs 2 150lbs 3 168lbs 4 154lbs Name: Weight, dtype: object
def llbs(x):
l=[]
if isinstance(x,str):
l = x.split("lbs")
i = l[0]
return float(i)
data['Weight'] = data['Weight'].apply(llbs)
data['Weight'].head()
0 159.0 1 183.0 2 150.0 3 168.0 4 154.0 Name: Weight, dtype: float64
# filling null values with mean
data['Weight'].fillna(data['Weight'].mean(), inplace = True)
data['ShortPassing'].fillna(data['ShortPassing'].mean(), inplace = True)
data['Volleys'].fillna(data['Volleys'].mean(), inplace = True)
data['Dribbling'].fillna(data['Dribbling'].mean(), inplace = True)
data['Curve'].fillna(data['Curve'].mean(), inplace = True)
data['FKAccuracy'].fillna(data['FKAccuracy'].mean(), inplace = True)
data['LongPassing'].fillna(data['LongPassing'].mean(), inplace = True)
data['BallControl'].fillna(data['BallControl'].mean(), inplace = True)
data['HeadingAccuracy'].fillna(data['HeadingAccuracy'].mean(), inplace = True)
data['Finishing'].fillna(data['Finishing'].mean(), inplace = True)
data['Crossing'].fillna(data['Crossing'].mean(), inplace = True)
data['Height'].fillna(data['Height'].mean(), inplace = True)
data['Joined'].fillna(data['Joined'].mean(), inplace = True)
# total number of unique positions and tables
data['Position'] = data['Position'].replace(np.nan, 0)
pos = list(data['Position'].unique())
print('Total number of unique positions:', len(pos)); print()
print('Positions:', pos)
Total number of unique positions: 28 Positions: ['RF', 'ST', 'LW', 'GK', 'RCM', 'LF', 'RS', 'RCB', 'LCM', 'CB', 'LDM', 'CAM', 'CDM', 'LS', 'LCB', 'RM', 'LAM', 'LM', 'LB', 'RDM', 'RW', 'CM', 'RB', 'RAM', 'CF', 'RWB', 'LWB', 0]
positiongroup = data.groupby(data['Position'])
tables = list()
for i in pos:
a = positiongroup.get_group(i).sort_values('Overall',ascending=0).head(5)
tables.append(a)
print('Total no. of tables:',len(tables))
Total no. of tables: 28
# repetition of players
idlist = list()
for i in range(0,len(pos)):
a = tables[i]['ID']
idlist.append(set(a.values))
ComIDs = []; x=0
while x<28:
l = []; y=27
while y>=0:
if x != y:
set1 = idlist[x].intersection(idlist[y])
l.extend(list(set1))
l = list(set(l))
y -= 1
ComIDs.extend(l)
ComIDs = list(set(ComIDs))
x += 1
if len(ComIDs)==0:
print('There are no players appearing in more than one table.')
else:
print('Players appearing in more than one table are:{}'.format(df1[df1['ID'] in ComIDs]))
There are no players appearing in more than one table.
# average wage for players in the corresponding tables
avgwage = list()
for i in range(0,len(pos)):
m = tables[i]['Wage'].mean()
avgwage.append(m)
print('Average wage for the top 5 in {}: {}'.format(pos[i],avgwage[i]))
Average wage for the top 5 in RF: 148000.0 Average wage for the top 5 in ST: 294000.0 Average wage for the top 5 in LW: 261000.0 Average wage for the top 5 in GK: 192800.0 Average wage for the top 5 in RCM: 240800.0 Average wage for the top 5 in LF: 121200.0 Average wage for the top 5 in RS: 132200.0 Average wage for the top 5 in RCB: 231000.0 Average wage for the top 5 in LCM: 184400.0 Average wage for the top 5 in CB: 139600.0 Average wage for the top 5 in LDM: 126600.0 Average wage for the top 5 in CAM: 174000.0 Average wage for the top 5 in CDM: 217000.0 Average wage for the top 5 in LS: 130200.0 Average wage for the top 5 in LCB: 162000.0 Average wage for the top 5 in RM: 131400.0 Average wage for the top 5 in LAM: 81600.0 Average wage for the top 5 in LM: 164600.0 Average wage for the top 5 in LB: 177200.0 Average wage for the top 5 in RDM: 105000.0 Average wage for the top 5 in RW: 202000.0 Average wage for the top 5 in CM: 130600.0 Average wage for the top 5 in RB: 155400.0 Average wage for the top 5 in RAM: 45400.0 Average wage for the top 5 in CF: 47400.0 Average wage for the top 5 in RWB: 44200.0 Average wage for the top 5 in LWB: 34200.0 Average wage for the top 5 in 0: 0.0
top = data[data['Contract Valid Until']=='2020-01-01'].sort_values('Overall',ascending=0).head(20)
top
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | Marking | StandingTackle | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 3 | 193080 | De Gea | 27 | Spain | 91 | 93 | Manchester United | 72000000.0 | 260000.0 | 1471 | ... | 15.0 | 21.0 | 13.0 | 90.0 | 85.0 | 87.0 | 88.0 | 94.0 | 138600000.0 | 1.0 |
| 6 | 177003 | L. Modrić | 32 | Croatia | 91 | 91 | Real Madrid | 67000000.0 | 420000.0 | 2280 | ... | 60.0 | 76.0 | 73.0 | 13.0 | 9.0 | 7.0 | 14.0 | 9.0 | 137400000.0 | 1.0 |
| 8 | 155862 | Sergio Ramos | 32 | Spain | 91 | 91 | Real Madrid | 51000000.0 | 380000.0 | 2201 | ... | 87.0 | 92.0 | 91.0 | 11.0 | 8.0 | 9.0 | 7.0 | 11.0 | 104600000.0 | 1.0 |
| 5 | 183277 | E. Hazard | 27 | Belgium | 91 | 91 | Chelsea | 93000000.0 | 340000.0 | 2142 | ... | 34.0 | 27.0 | 22.0 | 11.0 | 12.0 | 6.0 | 8.0 | 8.0 | 172100000.0 | 1.0 |
| 13 | 168542 | David Silva | 32 | Spain | 90 | 90 | Manchester City | 60000000.0 | 285000.0 | 2115 | ... | 59.0 | 53.0 | 29.0 | 6.0 | 15.0 | 7.0 | 6.0 | 12.0 | 111000000.0 | 1.0 |
| 21 | 179813 | E. Cavani | 31 | Uruguay | 89 | 89 | Paris Saint-Germain | 60000000.0 | 200000.0 | 2161 | ... | 52.0 | 45.0 | 39.0 | 12.0 | 5.0 | 13.0 | 13.0 | 10.0 | 111000000.0 | 1.0 |
| 24 | 138956 | G. Chiellini | 33 | Italy | 89 | 89 | Juventus | 27000000.0 | 215000.0 | 1841 | ... | 93.0 | 93.0 | 90.0 | 3.0 | 3.0 | 2.0 | 4.0 | 3.0 | 44600000.0 | 1.0 |
| 31 | 190460 | C. Eriksen | 26 | Denmark | 88 | 91 | Tottenham Hotspur | 73500000.0 | 205000.0 | 2117 | ... | 59.0 | 57.0 | 22.0 | 9.0 | 14.0 | 7.0 | 7.0 | 6.0 | 141500000.0 | 1.0 |
| 39 | 164240 | Thiago Silva | 33 | Brazil | 88 | 88 | Paris Saint-Germain | 24000000.0 | 165000.0 | 2077 | ... | 88.0 | 89.0 | 85.0 | 9.0 | 12.0 | 5.0 | 9.0 | 10.0 | 44400000.0 | 1.0 |
| 50 | 175943 | D. Mertens | 31 | Belgium | 87 | 87 | Napoli | 45000000.0 | 135000.0 | 2043 | ... | 25.0 | 40.0 | 40.0 | 7.0 | 10.0 | 8.0 | 10.0 | 4.0 | 76500000.0 | 1.0 |
| 52 | 171877 | M. Hamšík | 30 | Slovakia | 87 | 87 | Napoli | 46500000.0 | 125000.0 | 2188 | ... | 75.0 | 73.0 | 62.0 | 8.0 | 6.0 | 4.0 | 14.0 | 14.0 | 79100000.0 | 1.0 |
| 49 | 189332 | Jordi Alba | 29 | Spain | 87 | 87 | FC Barcelona | 38000000.0 | 250000.0 | 2230 | ... | 72.0 | 84.0 | 85.0 | 13.0 | 15.0 | 13.0 | 6.0 | 13.0 | 77900000.0 | 1.0 |
| 46 | 193041 | K. Navas | 31 | Costa Rica | 87 | 87 | Real Madrid | 30500000.0 | 195000.0 | 1345 | ... | 28.0 | 14.0 | 14.0 | 90.0 | 81.0 | 75.0 | 82.0 | 90.0 | 62500000.0 | 1.0 |
| 64 | 191043 | Alex Sandro | 27 | Brazil | 86 | 86 | Juventus | 36500000.0 | 160000.0 | 2198 | ... | 81.0 | 84.0 | 84.0 | 7.0 | 7.0 | 9.0 | 12.0 | 5.0 | 60200000.0 | 1.0 |
| 71 | 184087 | T. Alderweireld | 29 | Belgium | 86 | 87 | Tottenham Hotspur | 39000000.0 | 150000.0 | 2047 | ... | 90.0 | 91.0 | 86.0 | 16.0 | 6.0 | 14.0 | 16.0 | 14.0 | 75100000.0 | 1.0 |
| 73 | 177509 | M. Benatia | 31 | Morocco | 86 | 86 | Juventus | 30000000.0 | 160000.0 | 1803 | ... | 89.0 | 87.0 | 85.0 | 7.0 | 4.0 | 8.0 | 7.0 | 11.0 | 49500000.0 | 1.0 |
| 75 | 135507 | Fernandinho | 33 | Brazil | 86 | 86 | Manchester City | 18000000.0 | 185000.0 | 2183 | ... | 85.0 | 85.0 | 80.0 | 12.0 | 11.0 | 5.0 | 13.0 | 7.0 | 33300000.0 | 1.0 |
| 102 | 171919 | Naldo | 35 | Brazil | 85 | 85 | FC Schalke 04 | 9000000.0 | 38000.0 | 1959 | ... | 86.0 | 88.0 | 85.0 | 14.0 | 10.0 | 14.0 | 8.0 | 14.0 | 15300000.0 | 1.0 |
| 104 | 168609 | Miranda | 33 | Brazil | 85 | 85 | Inter | 15500000.0 | 96000.0 | 1879 | ... | 90.0 | 90.0 | 85.0 | 12.0 | 6.0 | 10.0 | 13.0 | 12.0 | 26400000.0 | 1.0 |
| 103 | 170890 | B. Matuidi | 31 | France | 85 | 85 | Juventus | 26000000.0 | 145000.0 | 2196 | ... | 85.0 | 84.0 | 86.0 | 8.0 | 11.0 | 5.0 | 10.0 | 14.0 | 42900000.0 | 1.0 |
20 rows × 86 columns
# average wage
avg = top['Wage'].mean(); print('Average wage for this set of players is:', avg)
# average age
age = top['Age'].mean(); print('Average age is:', age)
# correlation
top['Overall'].corr(top['Value'])
Average wage for this set of players is: 205450.0 Average age is: 30.65
0.7867166042074152
EXPLORATORY DATA ANALYSIS
data2 = data.loc[data.Nationality=='Spain', 'International Reputation']
data2.value_counts()
1.0 932 2.0 94 3.0 36 4.0 9 Name: International Reputation, dtype: int64
data2.value_counts().plot(kind='pie', autopct='%1.2f%%', explode=[0,0,0.4,0.5])
plt.show()
data['Jersey Number'] = data['Jersey Number'].values.astype(int)
dfcorr = data[['Age', 'Overall', 'Potential', 'Finishing', 'ShotPower', 'LongShots', 'Penalties', 'FKAccuracy']].corr()
cmap = sns.color_palette('viridis')
f = sns.heatmap(dfcorr, annot = True, cmap = cmap)
f.set_title('Correlation of Shooting Attributes of Football Players', fontsize = 12)
plt.show()
d = sns.pairplot(data[['Age', 'Overall', 'Potential', 'Finishing', 'ShotPower', 'LongShots', 'Penalties', 'FKAccuracy']]);
d.fig.suptitle('Data distribution and correlation')
plt.show()
n = sns.distplot(data[data['Nationality'] == 'Brazil']['Potential']);
n.set_title('Potential for player in the league', fontsize = 12)
plt.show()
n = sns.countplot(data[data['Nationality'] == 'Portugal']['Overall'], palette='ocean_r');
n.set_title('Potential for player in the league(Portugal)', fontsize = 12)
plt.show()
columns=data[['Positioning', 'Finishing', 'ShotPower', 'LongShots', 'Penalties', 'Volleys']]
att_corr = columns.corr()
s = sns.heatmap(att_corr, annot = True)
s.set_title('Heat map for shooting ability')
plt.show()
from wordcloud import WordCloud
#the name of the club
wordcloud = WordCloud(
background_color='lavenderblush',
width=1900,
height=1080
).generate(" ".join(data.Club[0:30]))
plt.imshow(wordcloud)
plt.show()
wordcloud = WordCloud(
background_color='aliceblue',
width=1900,
height=1080
).generate(" ".join(data.Name[0:30]))
plt.imshow(wordcloud)
plt.show()
sns.set(rc={"figure.figsize": (8, 6)})
plt.title("Overall ratings distribution")
ax = sns.distplot(data["Overall"],color="green",bins=10,kde=True)
plt.show()
# we observe that most of the players have mean ratings of 65 as most of the values are
# concentrated towards the neam and the graph follows a normal distribution by forming a bell
# shaped curve.
sns.set(rc={"figure.figsize": (20,10)})
sns.boxplot(x="Club", y="Age", data=top)
plt.title("Age group of different clubs")
plt.show()
sns.set(rc={"figure.figsize": (8, 6)})
plt.title("Distribution plot for Age Players ")
x=sns.distplot(x=data["Age"],color="green",bins=20,kde=True)
print("age of oldest player is : ", data["Age"].max())
print("age of youngest player is : ", data["Age"].min())
print("average age of players is : ", data["Age"].mean())
plt.show()
age of oldest player is : 45 age of youngest player is : 16 average age of players is : 25.122205745043114
sns.countplot("Nationality", data=data, order=data["Nationality"].value_counts().index[:5])
plt.title("Top 5 Countries that have maximum number of player participating in FIFA")
plt.show()
data["Nationality"].value_counts().head()
England 1662 Germany 1198 Spain 1072 Argentina 937 France 914 Name: Nationality, dtype: int64
sns.countplot("Preferred Foot", data=data)
plt.show()
plt.subplots(figsize=(20,8))
sns.countplot("Position", data=data, order=data["Position"].value_counts().index)
plt.xticks(rotation=45)
plt.title("Number of Positions")
plt.show()
plt.subplots(figsize=(20,8))
sns.countplot(x="Position",hue="Preferred Foot", data=data,order=data["Position"].value_counts().index)
plt.xticks(rotation=45)
plt.title("Preferred Foot for each position")
plt.show()
unique_counts = data.nunique()
lowest_distinct = unique_counts.min()
lowest_distinct_columns = unique_counts[unique_counts == lowest_distinct].index.tolist()
result = pd.DataFrame({col: data[col].unique() for col in lowest_distinct_columns})
result
| Preferred Foot | Real Face | Release_Clause_Class | |
|---|---|---|---|
| 0 | Left | Yes | 1.0 |
| 1 | Right | No | NaN |
| 2 | NaN | NaN | 0.0 |
data = data.dropna(subset=['Preferred Foot', 'Real Face', 'Release Clause'], how='all')
def height(x):
if x>=5 and x<=6:
return 'short'
elif x>6:
return 'tall'
data["height_group"] = data["Height"].apply(height)
def w(x):
if x>=100 and x<=150:
return 'under'
elif x>150 and x<=200:
return 'average'
elif x>200:
return 'over'
data["weight_group"] = data["Weight"].apply(w)
data
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | height_group | weight_group | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 158023 | L. Messi | 31 | Argentina | 94 | 94 | FC Barcelona | 110500000.0 | 565000.0 | 2202 | ... | 26.0 | 6.0 | 11.0 | 15.0 | 14.0 | 8.0 | 226500000.0 | 1.0 | short | average |
| 1 | 20801 | Cristiano Ronaldo | 33 | Portugal | 94 | 94 | Juventus | 77000000.0 | 405000.0 | 2228 | ... | 23.0 | 7.0 | 11.0 | 15.0 | 14.0 | 11.0 | 127100000.0 | 1.0 | tall | average |
| 2 | 190871 | Neymar Jr | 26 | Brazil | 92 | 93 | Paris Saint-Germain | 118500000.0 | 290000.0 | 2143 | ... | 33.0 | 9.0 | 9.0 | 15.0 | 15.0 | 11.0 | 228100000.0 | 1.0 | short | under |
| 3 | 193080 | De Gea | 27 | Spain | 91 | 93 | Manchester United | 72000000.0 | 260000.0 | 1471 | ... | 13.0 | 90.0 | 85.0 | 87.0 | 88.0 | 94.0 | 138600000.0 | 1.0 | tall | average |
| 4 | 192985 | K. De Bruyne | 27 | Belgium | 91 | 92 | Manchester City | 102000000.0 | 355000.0 | 2281 | ... | 51.0 | 15.0 | 13.0 | 5.0 | 10.0 | 13.0 | 196400000.0 | 1.0 | short | average |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 18202 | 238813 | J. Lundstram | 19 | England | 47 | 65 | Crewe Alexandra | 60000.0 | 1000.0 | 1307 | ... | 47.0 | 10.0 | 13.0 | 7.0 | 8.0 | 9.0 | 143000.0 | 0.0 | short | under |
| 18203 | 243165 | N. Christoffersson | 19 | Sweden | 47 | 63 | Trelleborgs FF | 60000.0 | 1000.0 | 1098 | ... | 19.0 | 10.0 | 9.0 | 9.0 | 5.0 | 12.0 | 113000.0 | 0.0 | tall | average |
| 18204 | 241638 | B. Worman | 16 | England | 47 | 67 | Cambridge United | 60000.0 | 1000.0 | 1189 | ... | 11.0 | 6.0 | 5.0 | 10.0 | 6.0 | 13.0 | 165000.0 | 0.0 | short | under |
| 18205 | 246268 | D. Walker-Rice | 17 | England | 47 | 66 | Tranmere Rovers | 60000.0 | 1000.0 | 1228 | ... | 27.0 | 14.0 | 6.0 | 14.0 | 8.0 | 9.0 | 143000.0 | 0.0 | short | average |
| 18206 | 246269 | G. Nugent | 16 | England | 46 | 66 | Tranmere Rovers | 60000.0 | 1000.0 | 1321 | ... | 50.0 | 10.0 | 15.0 | 9.0 | 12.0 | 9.0 | 165000.0 | 0.0 | short | average |
18159 rows × 88 columns
data["Height"].min()
5.083333333333333
data["Height"].max()
6.75
data[data["Height"]==5.083333333333333]
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | height_group | weight_group | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 6275 | 237994 | N. Barrios | 20 | Argentina | 69 | 82 | San Lorenzo de Almagro | 2000000.0 | 7000.0 | 1687 | ... | 51.0 | 13.0 | 8.0 | 7.0 | 12.0 | 6.0 | 4300000.0 | 1.0 | short | under |
| 6584 | 232951 | H. Nakagawa | 23 | Japan | 69 | 72 | Kashiwa Reysol | 1300000.0 | 5000.0 | 1740 | ... | 31.0 | 15.0 | 10.0 | 7.0 | 12.0 | 13.0 | 1800000.0 | 1.0 | short | under |
| 17157 | 242566 | K. Yamaguchi | 23 | Japan | 55 | 61 | Shonan Bellmare | 120000.0 | 1000.0 | 1383 | ... | 13.0 | 14.0 | 5.0 | 15.0 | 9.0 | 14.0 | 174000.0 | 0.0 | short | under |
3 rows × 88 columns
data[data["Height"]==6.75]
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | height_group | weight_group | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 11614 | 199321 | T. Holý | 26 | Czech Republic | 64 | 68 | Gillingham | 400000.0 | 2000.0 | 1028 | ... | 25.0 | 68.0 | 58.0 | 57.0 | 63.0 | 68.0 | 730000.0 | 0.0 | tall | over |
| 17927 | 243796 | D. Hodzic | 22 | Croatia | 51 | 61 | Vejle Boldklub | 50000.0 | 1000.0 | 793 | ... | 11.0 | 50.0 | 47.0 | 56.0 | 52.0 | 58.0 | 73000.0 | 0.0 | tall | average |
2 rows × 88 columns
data["Weight"].min()
110.0
data["Weight"].max()
243.0
data[data["Weight"]==110.0]
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | height_group | weight_group | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 13338 | 235600 | B. Al Mutairi | 28 | Saudi Arabia | 62 | 62 | Al Batin | 240000.0 | 3000.0 | 1522 | ... | 56.0 | 7.0 | 6.0 | 13.0 | 8.0 | 9.0 | 396000.0 | 0.0 | short | under |
| 17157 | 242566 | K. Yamaguchi | 23 | Japan | 55 | 61 | Shonan Bellmare | 120000.0 | 1000.0 | 1383 | ... | 13.0 | 14.0 | 5.0 | 15.0 | 9.0 | 14.0 | 174000.0 | 0.0 | short | under |
2 rows × 88 columns
data[data["Weight"]==243.0]
| ID | Name | Age | Nationality | Overall | Potential | Club | Value | Wage | Special | ... | SlidingTackle | GKDiving | GKHandling | GKKicking | GKPositioning | GKReflexes | Release Clause | Release_Clause_Class | height_group | weight_group | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 9501 | 156321 | A. Akinfenwa | 36 | England | 66 | 66 | Wycombe Wanderers | 230000.0 | 2000.0 | 1586 | ... | 24.0 | 14.0 | 7.0 | 15.0 | 6.0 | 16.0 | 403000.0 | 0.0 | short | over |
1 rows × 88 columns
sns.scatterplot(x=data["Overall"],y=data["Potential"],data=data)
plt.show()
sns.scatterplot(x=data["Age"],y=data["Weight"],hue="weight_group",data=data)
plt.show()
sns.scatterplot(x=data["Age"],y=data["Height"],hue="height_group",data=data)
plt.show()
sns.jointplot(x="Value", y='Release Clause', data=data, kind="reg")
plt.show()
HYPOTHESIS TESTING USING ANOVA MODEL
# STATISTICAL ANALYSIS FOR SIGNIFICANT COLUMNS WITH THE TARGET VARIABLE
data = data[data['Release Clause'].notna()]
data=data.drop(['Name',"Contract Valid Until",'Nationality','Club','Loaned From'],axis=1)
data.rename(columns = {'Release Clause':'Release_Clause',
'International Reputation':'International_Reputation',
'Preferred Foot':'Preferred_Foot','Work Rate':'Work_Rate','Body Type':'Body_Type',
'Real Face':'Real_Face'}, inplace = True)
#Hypothesis :
#Null Hypothesis : all columns have no relation with target column release clause.
#Alternative Hypothesis : all columns have relation with target column release clause.
import statsmodels.api as sm
from statsmodels.formula.api import ols
alpha=0.05
formula="Release_Clause~Preferred_Foot+Work_Rate+Body_Type+Real_Face+LS+ST+RS+LW+LF+CF+RF+RW+LAM+CAM+RAM+LM+LCM+CM+RCM+RM+LWB+LDM+CDM+RDM+RWB+LB+LCB+CB+RCB+RB"
model=ols(formula,data).fit()
anova_table=sm.stats.anova_lm(model)
anova_table
| df | sum_sq | mean_sq | F | PR(>F) | |
|---|---|---|---|---|---|
| Preferred_Foot | 1.0 | 9.211936e+13 | 9.211936e+13 | 12.270606 | 4.615968e-04 |
| Work_Rate | 8.0 | 6.436400e+16 | 8.045500e+15 | 1071.687445 | 0.000000e+00 |
| Body_Type | 9.0 | 1.434740e+17 | 1.594156e+16 | 2123.468358 | 0.000000e+00 |
| Real_Face | 1.0 | 3.323812e+17 | 3.323812e+17 | 44274.280951 | 0.000000e+00 |
| LS | 92.0 | 7.522404e+17 | 8.176527e+15 | 1089.140618 | 0.000000e+00 |
| ST | 92.0 | 1.411177e+17 | 1.533888e+15 | 204.318998 | 0.000000e+00 |
| RS | 92.0 | 5.935478e+16 | 6.451606e+14 | 85.937551 | 0.000000e+00 |
| LW | 104.0 | 1.153188e+17 | 1.108834e+15 | 147.700438 | 0.000000e+00 |
| LF | 101.0 | 2.325585e+16 | 2.302559e+14 | 30.670859 | 0.000000e+00 |
| CF | 101.0 | 1.603498e+16 | 1.587622e+14 | 21.147657 | 0.000000e+00 |
| RF | 101.0 | 1.254903e+16 | 1.242478e+14 | 16.550226 | 8.715449e-265 |
| RW | 104.0 | 1.951274e+16 | 1.876225e+14 | 24.991945 | 0.000000e+00 |
| LAM | 99.0 | 2.061221e+16 | 2.082041e+14 | 27.733482 | 0.000000e+00 |
| CAM | 99.0 | 5.757515e+15 | 5.815671e+13 | 7.746668 | 1.890903e-100 |
| RAM | 99.0 | 4.539872e+15 | 4.585729e+13 | 6.108344 | 1.217663e-71 |
| LM | 99.0 | 2.494914e+16 | 2.520115e+14 | 33.568768 | 0.000000e+00 |
| LCM | 91.0 | 1.247562e+16 | 1.370947e+14 | 18.261477 | 5.449000e-269 |
| CM | 91.0 | 3.627437e+15 | 3.986194e+13 | 5.309744 | 1.410508e-53 |
| RCM | 91.0 | 3.020967e+15 | 3.319744e+13 | 4.422010 | 5.523913e-40 |
| RM | 99.0 | 3.987367e+15 | 4.027644e+13 | 5.364956 | 6.666580e-59 |
| LWB | 94.0 | 2.530394e+16 | 2.691909e+14 | 35.857124 | 0.000000e+00 |
| LDM | 98.0 | 2.352521e+16 | 2.400532e+14 | 31.975881 | 0.000000e+00 |
| CDM | 98.0 | 2.025530e+15 | 2.066868e+13 | 2.753137 | 9.590235e-18 |
| RDM | 98.0 | 1.445686e+15 | 1.475190e+13 | 1.965002 | 4.633087e-08 |
| RWB | 94.0 | 1.684679e+15 | 1.792212e+13 | 2.387286 | 1.556629e-12 |
| LB | 97.0 | 6.937115e+15 | 7.151665e+13 | 9.526256 | 7.100463e-130 |
| LCB | 107.0 | 1.069029e+16 | 9.990925e+13 | 13.308245 | 3.320728e-216 |
| CB | 107.0 | 1.156577e+15 | 1.080913e+13 | 1.439812 | 2.064879e-03 |
| RCB | 107.0 | 1.231660e+15 | 1.151084e+13 | 1.533283 | 3.472934e-04 |
| RB | 97.0 | 1.584876e+15 | 1.633893e+13 | 2.176400 | 2.401565e-10 |
| Residual | 13880.0 | 1.042016e+17 | 7.507319e+12 | NaN | NaN |
data.rename(columns = {'Skill Moves':'Skill_Moves',
'Jersey Number':'Jersey_Number',"Weak Foot":"Weak_Foot"}, inplace = True)
formula="Release_Clause~ID+Age+Overall+Potential+Value+Wage+Special+International_Reputation+Weak_Foot+Skill_Moves+Jersey_Number+Joined+Height+Weight+Crossing+Finishing+HeadingAccuracy+ShortPassing+Volleys+Dribbling+Curve+FKAccuracy+LongPassing+BallControl+Acceleration+SprintSpeed+Agility+Reactions+Balance+ShotPower+Jumping+Stamina+Strength+LongShots+Aggression+Interceptions+Positioning+Vision+Penalties+Composure+Marking+StandingTackle+SlidingTackle+GKDiving+GKHandling+GKKicking+GKPositioning+GKReflexes+Release_Clause+Release_Clause_Class"
model1=ols(formula,data).fit()
anova_table=sm.stats.anova_lm(model1)
anova_table
| df | sum_sq | mean_sq | F | PR(>F) | |
|---|---|---|---|---|---|
| ID | 1.0 | 3.186724e+16 | 3.186724e+16 | 1.887392e+30 | 0.000000 |
| Age | 1.0 | 4.347045e+15 | 4.347045e+15 | 2.574612e+29 | 0.000000 |
| Overall | 1.0 | 8.811104e+17 | 8.811104e+17 | 5.218527e+31 | 0.000000 |
| Potential | 1.0 | 3.935338e+15 | 3.935338e+15 | 2.330771e+29 | 0.000000 |
| Value | 1.0 | 1.113597e+18 | 1.113597e+18 | 6.595466e+31 | 0.000000 |
| Wage | 1.0 | 1.367961e+12 | 1.367961e+12 | 8.101983e+25 | 0.000000 |
| Special | 1.0 | 6.879636e+12 | 6.879636e+12 | 4.074582e+26 | 0.000000 |
| International_Reputation | 1.0 | 4.767015e+14 | 4.767015e+14 | 2.823346e+28 | 0.000000 |
| Weak_Foot | 1.0 | 6.982612e+12 | 6.982612e+12 | 4.135571e+26 | 0.000000 |
| Skill_Moves | 1.0 | 2.099998e+12 | 2.099998e+12 | 1.243760e+26 | 0.000000 |
| Jersey_Number | 1.0 | 9.371773e+11 | 9.371773e+11 | 5.550593e+25 | 0.000000 |
| Joined | 1.0 | 1.409548e+12 | 1.409548e+12 | 8.348290e+25 | 0.000000 |
| Height | 1.0 | 3.852702e+11 | 3.852702e+11 | 2.281829e+25 | 0.000000 |
| Weight | 1.0 | 9.899069e+12 | 9.899069e+12 | 5.862893e+26 | 0.000000 |
| Crossing | 1.0 | 1.273477e+13 | 1.273477e+13 | 7.542386e+26 | 0.000000 |
| Finishing | 1.0 | 2.577620e+11 | 2.577620e+11 | 1.526640e+25 | 0.000000 |
| HeadingAccuracy | 1.0 | 2.057215e+12 | 2.057215e+12 | 1.218421e+26 | 0.000000 |
| ShortPassing | 1.0 | 4.869222e+12 | 4.869222e+12 | 2.883880e+26 | 0.000000 |
| Volleys | 1.0 | 4.935939e+13 | 4.935939e+13 | 2.923394e+27 | 0.000000 |
| Dribbling | 1.0 | 8.299397e+11 | 8.299397e+11 | 4.915459e+25 | 0.000000 |
| Curve | 1.0 | 2.385555e+12 | 2.385555e+12 | 1.412886e+26 | 0.000000 |
| FKAccuracy | 1.0 | 4.133019e+12 | 4.133019e+12 | 2.447851e+26 | 0.000000 |
| LongPassing | 1.0 | 2.423739e+12 | 2.423739e+12 | 1.435501e+26 | 0.000000 |
| BallControl | 1.0 | 1.404956e+11 | 1.404956e+11 | 8.321093e+24 | 0.000000 |
| Acceleration | 1.0 | 1.078754e+13 | 1.078754e+13 | 6.389102e+26 | 0.000000 |
| SprintSpeed | 1.0 | 1.251606e+12 | 1.251606e+12 | 7.412851e+25 | 0.000000 |
| Agility | 1.0 | 6.994169e+12 | 6.994169e+12 | 4.142416e+26 | 0.000000 |
| Reactions | 1.0 | 8.916389e+11 | 8.916389e+11 | 5.280884e+25 | 0.000000 |
| Balance | 1.0 | 4.852614e+11 | 4.852614e+11 | 2.874043e+25 | 0.000000 |
| ShotPower | 1.0 | 5.604731e+11 | 5.604731e+11 | 3.319498e+25 | 0.000000 |
| Jumping | 1.0 | 6.057224e+11 | 6.057224e+11 | 3.587495e+25 | 0.000000 |
| Stamina | 1.0 | 6.685164e+13 | 6.685164e+13 | 3.959402e+27 | 0.000000 |
| Strength | 1.0 | 1.536528e+12 | 1.536528e+12 | 9.100352e+25 | 0.000000 |
| LongShots | 1.0 | 1.564985e+11 | 1.564985e+11 | 9.268891e+24 | 0.000000 |
| Aggression | 1.0 | 5.726018e+11 | 5.726018e+11 | 3.391332e+25 | 0.000000 |
| Interceptions | 1.0 | 2.526161e+09 | 2.526161e+09 | 1.496162e+23 | 0.000000 |
| Positioning | 1.0 | 5.407311e+12 | 5.407311e+12 | 3.202572e+26 | 0.000000 |
| Vision | 1.0 | 9.000446e+12 | 9.000446e+12 | 5.330668e+26 | 0.000000 |
| Penalties | 1.0 | 5.567293e+12 | 5.567293e+12 | 3.297325e+26 | 0.000000 |
| Composure | 1.0 | 1.489117e+09 | 1.489117e+09 | 8.819552e+22 | 0.000000 |
| Marking | 1.0 | 4.481368e+12 | 4.481368e+12 | 2.654167e+26 | 0.000000 |
| StandingTackle | 1.0 | 1.326401e+13 | 1.326401e+13 | 7.855837e+26 | 0.000000 |
| SlidingTackle | 1.0 | 3.380766e+11 | 3.380766e+11 | 2.002316e+25 | 0.000000 |
| GKDiving | 1.0 | 9.100301e+10 | 9.100301e+10 | 5.389809e+24 | 0.000000 |
| GKHandling | 1.0 | 2.544972e+12 | 2.544972e+12 | 1.507303e+26 | 0.000000 |
| GKKicking | 1.0 | 4.993807e+10 | 4.993807e+10 | 2.957667e+24 | 0.000000 |
| GKPositioning | 1.0 | 3.515378e+11 | 3.515378e+11 | 2.082043e+25 | 0.000000 |
| GKReflexes | 1.0 | 5.480415e+10 | 5.480415e+10 | 3.245869e+24 | 0.000000 |
| Release_Clause | 1.0 | 2.180764e+16 | 2.180764e+16 | 1.291595e+30 | 0.000000 |
| Release_Clause_Class | 1.0 | 1.002670e-15 | 1.002670e-15 | 5.938485e-02 | 0.807474 |
| Residual | 16592.0 | 2.801439e-10 | 1.688427e-14 | NaN | NaN |
ONE SAMPLE T TEST
# h0: there is no difference in population mean and sample mean
# (mu_population_mean=mu_sample_mean )
#H0: mu>=4585060.98
# h1: there is no difference in population mean and sample mean
# (mu_population_mean!=mu_sample_mean)
#H1: mu<4585060.98
data["Release_Clause"].mean()
4585060.986600974
x=data["Release_Clause"].sample(n=5000)
x.mean()
4588689.2
x=pd.DataFrame(x)
x.isna().sum()
Release_Clause 0 dtype: int64
x=x.dropna(how="all")
from scipy.stats import ttest_1samp,ttest_ind
from statsmodels.stats.power import ttest_power
print('Mean is %2.1f Sd is %2.1f' % (x["Release_Clause"].mean(),np.std(x["Release_Clause"],ddof = 1)))
Mean is 4588689.2 Sd is 10423659.0
t_statistic, p_value = ttest_1samp(x["Release_Clause"],4585060.98)
print(t_statistic, p_value)
0.024612652513128507 0.9803649094834173
alpha=0.05
pvalue=p_value
if pvalue<alpha:
print("reject null and accept alternate")
else:
print("fail to reject null")
fail to reject null
WHEN POPULATION STANDARD DEVIATION IS NOT GIVEN
import scipy.stats as stats
mu = 4585060.98
xbar = x["Release_Clause"].mean()
n= 5000
s=np.std(x["Release_Clause"],ddof = 1)
std_error = s/np.sqrt(n)
t_stat = (xbar - mu)/std_error
t_crit=stats.t.ppf(0.025,n-1)
print( t_stat,t_crit)
0.02461265251312801 -1.9604386466615247
pvalue=stats.t.sf(t_stat,n-1)*2
pvalue
0.9803649094834177
CONFIDENCE INTERVAL
import math
n=len(x["Release_Clause"])
xbar=np.mean(x["Release_Clause"])
pop_stdev=np.std(x["Release_Clause"],ddof=1)
z_critical=stats.norm.isf(0.05)
std_error=pop_stdev/math.sqrt(n)
marginal_error=z_critical*std_error
confidence_interval=(xbar-marginal_error,xbar+marginal_error)
print("confidence interval is : ",confidence_interval)
confidence interval is : (4346216.722757076, 4831161.677242924)
from scipy.stats import f_oneway
from scipy.stats import ttest_1samp,ttest_ind
from statsmodels.stats.power import ttest_power
import matplotlib.pyplot as plt
import scipy
from statsmodels.formula.api import ols
import statsmodels.api as sm
from statsmodels.stats.anova import anova_lm
from scipy.stats import f_oneway
TWO SAMPLE T TEST
# h0: there is no difference in release clause of players who are short and tall in height group
# (mu_tall=mu_short )
# h1: tthere is no difference in release clause of players who are short and tall in height group
# (mu_tall!=mu_short )
# significance level
alpha=0.05
short=data[data["height_group"]=="short"]["Release_Clause"]
tall=data[data["height_group"]=="tall"]["Release_Clause"]
short=short.dropna(how="all")
tall=tall.dropna(how="all")
f_oneway(short,tall)
F_onewayResult(statistic=2.4869645622759378, pvalue=0.11481193892622046)
# using ols
formula="Release_Clause~height_group"
model=ols(formula,data).fit()
anova_table=anova_lm(model)
anova_table
| df | sum_sq | mean_sq | F | PR(>F) | |
|---|---|---|---|---|---|
| height_group | 1.0 | 3.074257e+14 | 3.074257e+14 | 2.486965 | 0.114812 |
| Residual | 16641.0 | 2.057075e+18 | 1.236148e+14 | NaN | NaN |
# Using two sample independent ttest
t,p=stats.ttest_ind(short,tall)
print("test_statistic : ",t)
print("p value : ",p)
test_statistic : -1.577011275253272 p value : 0.11481193892688972
# manual approach
alpha=0.05
n1=len(short)
n2=len(tall)
mean1=np.mean(short)
mean2=np.mean(tall)
sd1=np.std(short,ddof=1)
sd2=np.std(tall,ddof=1)
se1=sd1/np.sqrt(n1)
se2=sd2/np.sqrt(n2)
se=np.sqrt((se1**2+se2**2))
print("Standard Error:",se)
tstats=(mean1-mean2)/se
print("tstats:",tstats)
tcrit=stats.t.isf(alpha/2,n1+n2-2)
print("tcritical:",tcrit)
print("p value",round(2*stats.t.cdf(tstats,n1+n2-2),2))
Standard Error: 182738.55366884512 tstats: -1.5798464147935405 tcritical: 1.9601065505427373 p value 0.11
p_value=0.11
alpha=0.05
if pvalue<alpha:
print("reject null and accept alternate")
else:
print("fail to reject null")
fail to reject null
sns.barplot(x="height_group",y="Release_Clause",data=data)
plt.show()
n=len(short)
xbar=np.mean(short)
pop_stdev=np.std(short,ddof=1)
z_critical=stats.norm.isf(0.05)
std_error=pop_stdev/math.sqrt(n)
marginal_error=z_critical*std_error
confidence_interval=(xbar-marginal_error,xbar+marginal_error)
print("confidence interval is : ",confidence_interval)
confidence interval is : (4315651.609226293, 4663033.694144494)
n=len(tall)
xbar=np.mean(tall)
pop_stdev=np.std(tall,ddof=1)
z_critical=stats.norm.isf(0.05)
std_error=pop_stdev/math.sqrt(n)
marginal_error=z_critical*std_error
confidence_interval=(xbar-marginal_error,xbar+marginal_error)
print("confidence interval is : ",confidence_interval)
confidence interval is : (4532728.10970477, 5023354.891382581)
CHI SQUARE
from scipy.stats import chi2_contingency
# h0: there is no difference in proportion of weight in short and tall players
# h1:there is difference in proportion of weight in short and tall players
z=pd.crosstab(data["weight_group"],data["height_group"])
z
| height_group | short | tall |
|---|---|---|
| weight_group | ||
| average | 8419 | 5144 |
| over | 12 | 323 |
| under | 2694 | 51 |
array = np.array([[9173,5626],[14,355],[2938,53]])
array
array([[9173, 5626],
[ 14, 355],
[2938, 53]])
chi_sq_Stat, p, deg_freedom, exp_freq = stats.chi2_contingency(array)
print('Chi-square statistic %3.5f P value %1.6f' %(chi_sq_Stat, p))
if p< alpha:
print('Reject Null hypothesis and Accept alternate')
else:
print('Fail to reject Null')
Chi-square statistic 2146.43783 P value 0.000000 Reject Null hypothesis and Accept alternate
pd.crosstab(data["weight_group"],data["height_group"]).plot(kind="bar",stacked=True)
plt.show()
REGRESSION MODELLING - RELEASE CLAUSE (TARGET COLUMN)
data= data.apply(lambda x: x.fillna(x.value_counts().index[0]))
x=data.isna().sum()
y=x[x>0]
y
Series([], dtype: int64)
a=data.columns
print("all columns of the dataset are : \n ", a)
print("total number of columns in the dataset are : ", len(a))
all columns of the dataset are :
Index(['ID', 'Age', 'Overall', 'Potential', 'Value', 'Wage', 'Special',
'Preferred_Foot', 'International_Reputation', 'Weak_Foot',
'Skill_Moves', 'Work_Rate', 'Body_Type', 'Real_Face', 'Position',
'Jersey_Number', 'Joined', 'Height', 'Weight', 'LS', 'ST', 'RS', 'LW',
'LF', 'CF', 'RF', 'RW', 'LAM', 'CAM', 'RAM', 'LM', 'LCM', 'CM', 'RCM',
'RM', 'LWB', 'LDM', 'CDM', 'RDM', 'RWB', 'LB', 'LCB', 'CB', 'RCB', 'RB',
'Crossing', 'Finishing', 'HeadingAccuracy', 'ShortPassing', 'Volleys',
'Dribbling', 'Curve', 'FKAccuracy', 'LongPassing', 'BallControl',
'Acceleration', 'SprintSpeed', 'Agility', 'Reactions', 'Balance',
'ShotPower', 'Jumping', 'Stamina', 'Strength', 'LongShots',
'Aggression', 'Interceptions', 'Positioning', 'Vision', 'Penalties',
'Composure', 'Marking', 'StandingTackle', 'SlidingTackle', 'GKDiving',
'GKHandling', 'GKKicking', 'GKPositioning', 'GKReflexes',
'Release_Clause', 'Release_Clause_Class', 'height_group',
'weight_group'],
dtype='object')
total number of columns in the dataset are : 83
cat=[col for col in data.columns if data[col].dtype=="object"]
print("categorical columns in the dataset are : ", cat,end=" ")
print("\ntotal number of categorical columns in the dataset are : ", len(cat))
categorical columns in the dataset are : ['Preferred_Foot', 'Work_Rate', 'Body_Type', 'Real_Face', 'Position', 'LS', 'ST', 'RS', 'LW', 'LF', 'CF', 'RF', 'RW', 'LAM', 'CAM', 'RAM', 'LM', 'LCM', 'CM', 'RCM', 'RM', 'LWB', 'LDM', 'CDM', 'RDM', 'RWB', 'LB', 'LCB', 'CB', 'RCB', 'RB', 'height_group', 'weight_group'] total number of categorical columns in the dataset are : 33
num=[col for col in data.columns if data[col].dtype!="object"]
print("numerical columns in the dataset are : ", num,end=" ")
print("\ntotal number of numerical columns in the dataset are : ", len(num))
numerical columns in the dataset are : ['ID', 'Age', 'Overall', 'Potential', 'Value', 'Wage', 'Special', 'International_Reputation', 'Weak_Foot', 'Skill_Moves', 'Jersey_Number', 'Joined', 'Height', 'Weight', 'Crossing', 'Finishing', 'HeadingAccuracy', 'ShortPassing', 'Volleys', 'Dribbling', 'Curve', 'FKAccuracy', 'LongPassing', 'BallControl', 'Acceleration', 'SprintSpeed', 'Agility', 'Reactions', 'Balance', 'ShotPower', 'Jumping', 'Stamina', 'Strength', 'LongShots', 'Aggression', 'Interceptions', 'Positioning', 'Vision', 'Penalties', 'Composure', 'Marking', 'StandingTackle', 'SlidingTackle', 'GKDiving', 'GKHandling', 'GKKicking', 'GKPositioning', 'GKReflexes', 'Release_Clause', 'Release_Clause_Class'] total number of numerical columns in the dataset are : 50
# for numeric columns
for cols in num:
fig,axes = plt.subplots(1,2, figsize = (15,5))
print("skewness for" , cols , "is : ", data[cols].skew())
print("kurtosis for" , cols , "is : ", data[cols].kurtosis())
sns.distplot(data[cols], bins = 25, ax = axes[0]).set(title = cols , ylabel = 'frequency')
sns.boxplot(y = data[cols], ax = axes[1])
plt.title(cols)
plt.show()
skewness for ID is : -2.2294887370832686 kurtosis for ID is : 9.215202613907739
skewness for Age is : 0.33737127650526305 kurtosis for Age is : -0.5558199895438887
skewness for Overall is : 0.0840283889754774 kurtosis for Overall is : 0.0770865926594575
skewness for Potential is : 0.3135037536792178 kurtosis for Potential is : 0.07158722844751919
skewness for Value is : 7.001447183890468 kurtosis for Value is : 74.85414928231472
skewness for Wage is : 7.9694331748352925 kurtosis for Wage is : 101.42016347890001
skewness for Special is : -0.693448105158702 kurtosis for Special is : 0.2936432400114133
skewness for International_Reputation is : 4.052396631154956 kurtosis for International_Reputation is : 18.85426492020848
skewness for Weak_Foot is : 0.1292694771471369 kurtosis for Weak_Foot is : 0.6596875547151875
skewness for Skill_Moves is : 0.1657353238072117 kurtosis for Skill_Moves is : -0.07337433001886362
skewness for Jersey_Number is : 2.1580889916823938 kurtosis for Jersey_Number is : 6.9536622904917404
skewness for Joined is : -2.520411469757906 kurtosis for Joined is : 9.983203243655023
skewness for Height is : -0.01362442958879466 kurtosis for Height is : -0.20897479023326238
skewness for Weight is : 0.20964318644290506 kurtosis for Weight is : 0.08567097742716356
skewness for Crossing is : -0.5828783953104434 kurtosis for Crossing is : -0.5609433201811647
skewness for Finishing is : -0.28745057253085654 kurtosis for Finishing is : -0.9721998409108616
skewness for HeadingAccuracy is : -0.8735304349427665 kurtosis for HeadingAccuracy is : 0.25682673771751663
skewness for ShortPassing is : -1.0819963062617242 kurtosis for ShortPassing is : 0.7331614857288584
skewness for Volleys is : -0.13883764268550222 kurtosis for Volleys is : -0.720583540094692
skewness for Dribbling is : -1.0657752338192605 kurtosis for Dribbling is : 0.2848063442439166
skewness for Curve is : -0.22917200183511482 kurtosis for Curve is : -0.7526742699547309
skewness for FKAccuracy is : 0.10692323264164297 kurtosis for FKAccuracy is : -0.725120719972959
skewness for LongPassing is : -0.591088020936485 kurtosis for LongPassing is : -0.38158647589289574
skewness for BallControl is : -1.24771951784994 kurtosis for BallControl is : 0.9667493220733099
skewness for Acceleration is : -0.8094956213387197 kurtosis for Acceleration is : 0.44346056908419307
skewness for SprintSpeed is : -0.821405480057497 kurtosis for SprintSpeed is : 0.5060376065526091
skewness for Agility is : -0.5916196768184859 kurtosis for Agility is : -0.0837994676992726
skewness for Reactions is : -0.1051244780388915 kurtosis for Reactions is : 0.05160290971996684
skewness for Balance is : -0.5775953510011431 kurtosis for Balance is : 0.07068945623295386
skewness for ShotPower is : -0.6633040089090644 kurtosis for ShotPower is : -0.36550486413918515
skewness for Jumping is : -0.45822835096521575 kurtosis for Jumping is : 0.32268760602271485
skewness for Stamina is : -0.9058056082472926 kurtosis for Stamina is : 0.4284858107368361
skewness for Strength is : -0.4668370586229843 kurtosis for Strength is : 0.05161687626490563
skewness for LongShots is : -0.4153432107988793 kurtosis for LongShots is : -0.8372083682993052
skewness for Aggression is : -0.4399481987902463 kurtosis for Aggression is : -0.6217941507248774
skewness for Interceptions is : -0.2751945039564909 kurtosis for Interceptions is : -1.2529506016864898
skewness for Positioning is : -0.7041787087503831 kurtosis for Positioning is : -0.4272659382378601
skewness for Vision is : -0.3316057361329239 kurtosis for Vision is : -0.36401920860840287
skewness for Penalties is : -0.34356550216881765 kurtosis for Penalties is : -0.3573261063926192
skewness for Composure is : -0.3664741741505178 kurtosis for Composure is : 0.18052840821701022
skewness for Marking is : -0.36764811674226755 kurtosis for Marking is : -1.082045927265671
skewness for StandingTackle is : -0.3459116999820049 kurtosis for StandingTackle is : -1.3006883260219362
skewness for SlidingTackle is : -0.2747789260775261 kurtosis for SlidingTackle is : -1.3513466751084644
skewness for GKDiving is : 2.4045712409919178 kurtosis for GKDiving is : 4.261801287522408
skewness for GKHandling is : 2.3967001429233896 kurtosis for GKHandling is : 4.256285313269617
skewness for GKKicking is : 2.3948724807829076 kurtosis for GKKicking is : 4.281558236563226
skewness for GKPositioning is : 2.4298917336805377 kurtosis for GKPositioning is : 4.46228338513024
skewness for GKReflexes is : 2.4159407456692086 kurtosis for GKReflexes is : 4.325066910138679
skewness for Release_Clause is : 7.109883679052365 kurtosis for Release_Clause is : 77.14629619358416
skewness for Release_Clause_Class is : -0.200135089571233 kurtosis for Release_Clause_Class is : -1.960181516637769
data=data.drop(['LS', 'ST', 'RS', 'LW', 'LF', 'CF', 'RF', 'RW', 'LAM', 'CAM', 'RAM', 'LM', 'LCM', 'CM',
'RCM', 'RM', 'LWB', 'LDM', 'CDM', 'RDM', 'RWB', 'LB', 'LCB', 'CB', 'RCB', 'RB'],axis=1)
data=pd.get_dummies(data,drop_first=True)
data.rename(columns = {'Release_Clause':'Release Clause',
'International_Reputation':'International Reputation',
'Preferred_Foot':'Preferred Foot','Work_Rate':'Work Rate','Body_Type':'Body Type',
'Real_Face':'Real Face'}, inplace = True)
data.rename(columns = {'Skill_Moves':'Skill Moves',
'Jersey_Number':'Jersey Number',"Weak_Foot":"Weak Foot"}, inplace = True)
import statsmodels.api as sm
X=data.drop("Release Clause",axis=1)
y=data["Release Clause"]
#Adding constant column of ones, mandatory for sm.OLS model
X_constant = sm.add_constant(X)
#Fitting sm.OLS model
model = sm.OLS(y,X_constant).fit()
model.summary()
| Dep. Variable: | Release Clause | R-squared: | 0.990 |
|---|---|---|---|
| Model: | OLS | Adj. R-squared: | 0.990 |
| Method: | Least Squares | F-statistic: | 1.679e+04 |
| Date: | Mon, 15 Aug 2022 | Prob (F-statistic): | 0.00 |
| Time: | 22:22:22 | Log-Likelihood: | -2.5536e+05 |
| No. Observations: | 16643 | AIC: | 5.109e+05 |
| Df Residuals: | 16545 | BIC: | 5.117e+05 |
| Df Model: | 97 | ||
| Covariance Type: | nonrobust |
| coef | std err | t | P>|t| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| const | -5.201e+06 | 8.89e+06 | -0.585 | 0.559 | -2.26e+07 | 1.22e+07 |
| ID | -0.0831 | 0.466 | -0.178 | 0.859 | -0.997 | 0.831 |
| Age | 1.301e+04 | 4959.777 | 2.622 | 0.009 | 3285.065 | 2.27e+04 |
| Overall | -4.246e+04 | 5585.682 | -7.602 | 0.000 | -5.34e+04 | -3.15e+04 |
| Potential | 3.633e+04 | 3628.376 | 10.014 | 0.000 | 2.92e+04 | 4.34e+04 |
| Value | 1.9596 | 0.004 | 532.978 | 0.000 | 1.952 | 1.967 |
| Wage | 2.4113 | 0.848 | 2.842 | 0.004 | 0.748 | 4.074 |
| Special | 1095.8467 | 5051.299 | 0.217 | 0.828 | -8805.241 | 1.1e+04 |
| International Reputation | -6.919e+05 | 3.33e+04 | -20.760 | 0.000 | -7.57e+05 | -6.27e+05 |
| Weak Foot | -1.554e+04 | 1.45e+04 | -1.072 | 0.284 | -4.39e+04 | 1.29e+04 |
| Skill Moves | 3.279e+04 | 2.41e+04 | 1.363 | 0.173 | -1.44e+04 | 7.99e+04 |
| Jersey Number | 446.9301 | 574.130 | 0.778 | 0.436 | -678.427 | 1572.287 |
| Joined | 2429.8789 | 4365.461 | 0.557 | 0.578 | -6126.894 | 1.1e+04 |
| Height | 1.504e+05 | 9.42e+04 | 1.597 | 0.110 | -3.42e+04 | 3.35e+05 |
| Weight | -1801.5865 | 1204.152 | -1.496 | 0.135 | -4161.853 | 558.680 |
| Crossing | 1787.2234 | 5223.807 | 0.342 | 0.732 | -8451.999 | 1.2e+04 |
| Finishing | 6449.7258 | 5269.061 | 1.224 | 0.221 | -3878.199 | 1.68e+04 |
| HeadingAccuracy | 130.9172 | 5215.502 | 0.025 | 0.980 | -1.01e+04 | 1.04e+04 |
| ShortPassing | 4087.7899 | 5487.287 | 0.745 | 0.456 | -6667.881 | 1.48e+04 |
| Volleys | -7880.6693 | 5209.614 | -1.513 | 0.130 | -1.81e+04 | 2330.734 |
| Dribbling | -1512.9219 | 5408.071 | -0.280 | 0.780 | -1.21e+04 | 9087.478 |
| Curve | 1339.0664 | 5208.602 | 0.257 | 0.797 | -8870.353 | 1.15e+04 |
| FKAccuracy | -3209.9265 | 5185.384 | -0.619 | 0.536 | -1.34e+04 | 6953.982 |
| LongPassing | -4386.4750 | 5295.865 | -0.828 | 0.408 | -1.48e+04 | 5993.989 |
| BallControl | 362.3062 | 5560.860 | 0.065 | 0.948 | -1.05e+04 | 1.13e+04 |
| Acceleration | -1799.5142 | 5347.082 | -0.337 | 0.736 | -1.23e+04 | 8681.340 |
| SprintSpeed | -979.7666 | 5290.747 | -0.185 | 0.853 | -1.14e+04 | 9390.666 |
| Agility | -3468.8236 | 5231.747 | -0.663 | 0.507 | -1.37e+04 | 6785.962 |
| Reactions | -1260.4322 | 5399.195 | -0.233 | 0.815 | -1.18e+04 | 9322.571 |
| Balance | -277.8233 | 5218.333 | -0.053 | 0.958 | -1.05e+04 | 9950.671 |
| ShotPower | -1747.1652 | 5226.487 | -0.334 | 0.738 | -1.2e+04 | 8497.310 |
| Jumping | -1023.5525 | 5134.756 | -0.199 | 0.842 | -1.11e+04 | 9041.121 |
| Stamina | -7123.1016 | 5160.703 | -1.380 | 0.168 | -1.72e+04 | 2992.431 |
| Strength | -1837.4149 | 5210.988 | -0.353 | 0.724 | -1.21e+04 | 8376.681 |
| LongShots | 718.0344 | 5236.652 | 0.137 | 0.891 | -9546.365 | 1.1e+04 |
| Aggression | -232.9794 | 5159.012 | -0.045 | 0.964 | -1.03e+04 | 9879.237 |
| Interceptions | -3466.4239 | 5249.463 | -0.660 | 0.509 | -1.38e+04 | 6823.088 |
| Positioning | -2030.4882 | 5266.996 | -0.386 | 0.700 | -1.24e+04 | 8293.389 |
| Vision | -3283.3262 | 5206.216 | -0.631 | 0.528 | -1.35e+04 | 6921.415 |
| Penalties | -2651.0799 | 5183.743 | -0.511 | 0.609 | -1.28e+04 | 7509.613 |
| Composure | 315.7233 | 1457.006 | 0.217 | 0.828 | -2540.164 | 3171.611 |
| Marking | -4089.9889 | 5188.342 | -0.788 | 0.431 | -1.43e+04 | 6079.719 |
| StandingTackle | 3931.1238 | 5474.396 | 0.718 | 0.473 | -6799.279 | 1.47e+04 |
| SlidingTackle | 285.7604 | 5421.581 | 0.053 | 0.958 | -1.03e+04 | 1.09e+04 |
| GKDiving | 1700.9705 | 5716.385 | 0.298 | 0.766 | -9503.759 | 1.29e+04 |
| GKHandling | 5798.2585 | 5752.640 | 1.008 | 0.314 | -5477.533 | 1.71e+04 |
| GKKicking | 1955.7878 | 5648.621 | 0.346 | 0.729 | -9116.116 | 1.3e+04 |
| GKPositioning | 2202.8287 | 5677.591 | 0.388 | 0.698 | -8925.860 | 1.33e+04 |
| GKReflexes | 904.3443 | 5711.569 | 0.158 | 0.874 | -1.03e+04 | 1.21e+04 |
| Release_Clause_Class | -5.179e+04 | 2.96e+04 | -1.750 | 0.080 | -1.1e+05 | 6232.274 |
| Preferred_Foot_Right | 4106.1389 | 2.44e+04 | 0.169 | 0.866 | -4.36e+04 | 5.19e+04 |
| Work_Rate_High/ Low | 5.801e+04 | 6.11e+04 | 0.950 | 0.342 | -6.17e+04 | 1.78e+05 |
| Work_Rate_High/ Medium | 8.366e+04 | 4.36e+04 | 1.920 | 0.055 | -1767.169 | 1.69e+05 |
| Work_Rate_Low/ High | 2.334e+04 | 7.09e+04 | 0.329 | 0.742 | -1.16e+05 | 1.62e+05 |
| Work_Rate_Low/ Low | 3.08e+05 | 2.2e+05 | 1.398 | 0.162 | -1.24e+05 | 7.4e+05 |
| Work_Rate_Low/ Medium | 7.479e+04 | 7.11e+04 | 1.053 | 0.293 | -6.45e+04 | 2.14e+05 |
| Work_Rate_Medium/ High | 8.652e+04 | 4.87e+04 | 1.776 | 0.076 | -8985.880 | 1.82e+05 |
| Work_Rate_Medium/ Low | 7.765e+04 | 5.82e+04 | 1.335 | 0.182 | -3.64e+04 | 1.92e+05 |
| Work_Rate_Medium/ Medium | 5.918e+04 | 4.18e+04 | 1.416 | 0.157 | -2.27e+04 | 1.41e+05 |
| Body_Type_C. Ronaldo | -2.199e+07 | 1.62e+06 | -13.581 | 0.000 | -2.52e+07 | -1.88e+07 |
| Body_Type_Courtois | 1.025e+07 | 1.61e+06 | 6.368 | 0.000 | 7.1e+06 | 1.34e+07 |
| Body_Type_Lean | -6.019e+04 | 1.15e+06 | -0.052 | 0.958 | -2.31e+06 | 2.19e+06 |
| Body_Type_Messi | 1.316e+07 | 1.67e+06 | 7.901 | 0.000 | 9.89e+06 | 1.64e+07 |
| Body_Type_Neymar | -2.015e+06 | 1.63e+06 | -1.240 | 0.215 | -5.2e+06 | 1.17e+06 |
| Body_Type_Normal | -7.964e+04 | 1.15e+06 | -0.069 | 0.945 | -2.33e+06 | 2.17e+06 |
| Body_Type_PLAYER_BODY_TYPE_25 | 1.715e+06 | 1.61e+06 | 1.065 | 0.287 | -1.44e+06 | 4.87e+06 |
| Body_Type_Shaqiri | 4.902e+04 | 1.61e+06 | 0.031 | 0.976 | -3.1e+06 | 3.2e+06 |
| Body_Type_Stocky | -5.827e+04 | 1.15e+06 | -0.051 | 0.960 | -2.31e+06 | 2.19e+06 |
| Real_Face_Yes | 3.713e+05 | 3.69e+04 | 10.065 | 0.000 | 2.99e+05 | 4.44e+05 |
| Position_CB | 1.525e+05 | 6.99e+04 | 2.181 | 0.029 | 1.55e+04 | 2.89e+05 |
| Position_CDM | 2.061e+05 | 6.34e+04 | 3.249 | 0.001 | 8.17e+04 | 3.3e+05 |
| Position_CF | -2.027e+05 | 1.44e+05 | -1.406 | 0.160 | -4.85e+05 | 7.99e+04 |
| Position_CM | 7.695e+04 | 5.46e+04 | 1.409 | 0.159 | -3.01e+04 | 1.84e+05 |
| Position_GK | -6.701e+05 | 1.94e+05 | -3.461 | 0.001 | -1.05e+06 | -2.91e+05 |
| Position_LAM | -3.511e+05 | 2.55e+05 | -1.378 | 0.168 | -8.51e+05 | 1.48e+05 |
| Position_LB | 1.38e+05 | 6.52e+04 | 2.116 | 0.034 | 1.02e+04 | 2.66e+05 |
| Position_LCB | 1.258e+05 | 7.96e+04 | 1.580 | 0.114 | -3.02e+04 | 2.82e+05 |
| Position_LCM | 7.05e+04 | 7.44e+04 | 0.948 | 0.343 | -7.53e+04 | 2.16e+05 |
| Position_LDM | 1.147e+05 | 8.92e+04 | 1.286 | 0.199 | -6.01e+04 | 2.89e+05 |
| Position_LF | -2.705e+06 | 2.94e+05 | -9.213 | 0.000 | -3.28e+06 | -2.13e+06 |
| Position_LM | 1.024e+05 | 5.33e+04 | 1.923 | 0.055 | -1981.401 | 2.07e+05 |
| Position_LS | -1.61e+05 | 9.68e+04 | -1.664 | 0.096 | -3.51e+05 | 2.86e+04 |
| Position_LW | 1.648e+05 | 7.36e+04 | 2.240 | 0.025 | 2.06e+04 | 3.09e+05 |
| Position_LWB | 9.025e+04 | 1.4e+05 | 0.646 | 0.518 | -1.83e+05 | 3.64e+05 |
| Position_RAM | -1.038e+06 | 2.49e+05 | -4.177 | 0.000 | -1.53e+06 | -5.51e+05 |
| Position_RB | 1.604e+05 | 6.44e+04 | 2.489 | 0.013 | 3.41e+04 | 2.87e+05 |
| Position_RCB | 1.476e+05 | 7.99e+04 | 1.848 | 0.065 | -8915.651 | 3.04e+05 |
| Position_RCM | 7.207e+04 | 7.5e+04 | 0.961 | 0.337 | -7.5e+04 | 2.19e+05 |
| Position_RDM | 1.932e+05 | 8.91e+04 | 2.168 | 0.030 | 1.85e+04 | 3.68e+05 |
| Position_RF | -1.675e+06 | 3.26e+05 | -5.136 | 0.000 | -2.31e+06 | -1.04e+06 |
| Position_RM | 1.111e+05 | 5.34e+04 | 2.080 | 0.038 | 6384.441 | 2.16e+05 |
| Position_RS | 4.391e+04 | 9.76e+04 | 0.450 | 0.653 | -1.47e+05 | 2.35e+05 |
| Position_RW | 3.307e+04 | 7.47e+04 | 0.443 | 0.658 | -1.13e+05 | 1.79e+05 |
| Position_RWB | 9.172e+04 | 1.33e+05 | 0.689 | 0.491 | -1.69e+05 | 3.53e+05 |
| Position_ST | 6.502e+04 | 5.72e+04 | 1.137 | 0.255 | -4.7e+04 | 1.77e+05 |
| height_group_tall | -3.246e+04 | 3.12e+04 | -1.040 | 0.298 | -9.36e+04 | 2.87e+04 |
| weight_group_over | -1.962e+05 | 6.96e+04 | -2.819 | 0.005 | -3.33e+05 | -5.98e+04 |
| weight_group_under | -1.829e+04 | 3.29e+04 | -0.555 | 0.579 | -8.29e+04 | 4.63e+04 |
| Omnibus: | 6421.087 | Durbin-Watson: | 1.959 |
|---|---|---|---|
| Prob(Omnibus): | 0.000 | Jarque-Bera (JB): | 1630805.547 |
| Skew: | -0.635 | Prob(JB): | 0.00 |
| Kurtosis: | 51.478 | Cond. No. | 6.39e+09 |
data.shape
(16643, 98)
FEATURE SELECTION
X_1 = sm.add_constant(X)
model = sm.OLS(y,X_1).fit()
model.pvalues
const 5.586486e-01
ID 8.585810e-01
Age 8.738043e-03
Overall 3.077665e-14
Potential 1.547844e-23
...
Position_RWB 4.906341e-01
Position_ST 2.554567e-01
height_group_tall 2.984853e-01
weight_group_over 4.827601e-03
weight_group_under 5.788287e-01
Length: 98, dtype: float64
#Backward Elimination
cols = list(X.columns)
pmax = 1
while (len(cols)>0):
p= []
X_1 = X[cols]
X_1 = sm.add_constant(X_1)
model = sm.OLS(y,X_1).fit()
p = pd.Series(model.pvalues.values[1:],index = cols)
pmax = max(p)
feature_with_p_max = p.idxmax()
if(pmax>0.05):
cols.remove(feature_with_p_max)
else:
break
selected_features_BE = cols
print(selected_features_BE)
['Age', 'Overall', 'Potential', 'Value', 'Wage', 'International Reputation', 'Crossing', 'Finishing', 'ShortPassing', 'Volleys', 'LongPassing', 'Stamina', 'Vision', 'Marking', 'StandingTackle', 'GKHandling', 'Body_Type_C. Ronaldo', 'Body_Type_Courtois', 'Body_Type_Messi', 'Real_Face_Yes', 'Position_CDM', 'Position_GK', 'Position_LF', 'Position_LS', 'Position_RAM', 'Position_RF', 'weight_group_over']
len((selected_features_BE))
27
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
df=data[["Release Clause",'Age', 'Overall', 'Potential', 'Value', 'Wage', 'International Reputation', 'Crossing', 'Finishing', 'ShortPassing', 'Volleys', 'LongPassing', 'Stamina', 'Vision', 'Marking', 'StandingTackle', 'GKHandling', 'Body_Type_C. Ronaldo', 'Body_Type_Courtois', 'Body_Type_Messi', 'Real_Face_Yes', 'Position_CDM', 'Position_GK', 'Position_LF', 'Position_LS', 'Position_RAM', 'Position_RF', 'weight_group_over']]
X=df.drop("Release Clause",axis=1)
y=df["Release Clause"]
#Adding constant column of ones, mandatory for sm.OLS model
X_constant = sm.add_constant(X)
#Fitting sm.OLS model
model = sm.OLS(y,X_constant).fit()
model.summary()
| Dep. Variable: | Release Clause | R-squared: | 0.990 |
|---|---|---|---|
| Model: | OLS | Adj. R-squared: | 0.990 |
| Method: | Least Squares | F-statistic: | 6.030e+04 |
| Date: | Mon, 15 Aug 2022 | Prob (F-statistic): | 0.00 |
| Time: | 22:22:26 | Log-Likelihood: | -2.5540e+05 |
| No. Observations: | 16643 | AIC: | 5.108e+05 |
| Df Residuals: | 16615 | BIC: | 5.111e+05 |
| Df Model: | 27 | ||
| Covariance Type: | nonrobust |
| coef | std err | t | P>|t| | [0.025 | 0.975] | |
|---|---|---|---|---|---|---|
| const | 4.664e+05 | 1.72e+05 | 2.709 | 0.007 | 1.29e+05 | 8.04e+05 |
| Age | 1.38e+04 | 3979.209 | 3.468 | 0.001 | 5999.532 | 2.16e+04 |
| Overall | -4.219e+04 | 3935.752 | -10.720 | 0.000 | -4.99e+04 | -3.45e+04 |
| Potential | 3.618e+04 | 3484.826 | 10.381 | 0.000 | 2.93e+04 | 4.3e+04 |
| Value | 1.9598 | 0.003 | 572.504 | 0.000 | 1.953 | 1.967 |
| Wage | 2.4106 | 0.837 | 2.881 | 0.004 | 0.770 | 4.051 |
| International Reputation | -6.905e+05 | 3.25e+04 | -21.215 | 0.000 | -7.54e+05 | -6.27e+05 |
| Crossing | 3316.9424 | 923.858 | 3.590 | 0.000 | 1506.082 | 5127.803 |
| Finishing | 6892.8765 | 1208.988 | 5.701 | 0.000 | 4523.130 | 9262.623 |
| ShortPassing | 4598.1326 | 1978.453 | 2.324 | 0.020 | 720.154 | 8476.111 |
| Volleys | -6976.9503 | 1157.245 | -6.029 | 0.000 | -9245.274 | -4708.626 |
| LongPassing | -3661.1356 | 1489.028 | -2.459 | 0.014 | -6579.790 | -742.482 |
| Stamina | -6818.3131 | 969.916 | -7.030 | 0.000 | -8719.452 | -4917.175 |
| Vision | -2978.1826 | 1172.147 | -2.541 | 0.011 | -5275.717 | -680.649 |
| Marking | -2769.0323 | 1108.637 | -2.498 | 0.013 | -4942.078 | -595.986 |
| StandingTackle | 5797.5939 | 1088.344 | 5.327 | 0.000 | 3664.324 | 7930.864 |
| GKHandling | 9518.4253 | 2407.542 | 3.954 | 0.000 | 4799.385 | 1.42e+04 |
| Body_Type_C. Ronaldo | -2.184e+07 | 1.13e+06 | -19.249 | 0.000 | -2.41e+07 | -1.96e+07 |
| Body_Type_Courtois | 1.036e+07 | 1.13e+06 | 9.194 | 0.000 | 8.15e+06 | 1.26e+07 |
| Body_Type_Messi | 1.321e+07 | 1.2e+06 | 11.035 | 0.000 | 1.09e+07 | 1.56e+07 |
| Real_Face_Yes | 3.777e+05 | 3.65e+04 | 10.348 | 0.000 | 3.06e+05 | 4.49e+05 |
| Position_CDM | 9.677e+04 | 4.09e+04 | 2.364 | 0.018 | 1.65e+04 | 1.77e+05 |
| Position_GK | -4.247e+05 | 1.33e+05 | -3.187 | 0.001 | -6.86e+05 | -1.63e+05 |
| Position_LF | -2.765e+06 | 2.9e+05 | -9.519 | 0.000 | -3.33e+06 | -2.2e+06 |
| Position_LS | -2.29e+05 | 8.46e+04 | -2.706 | 0.007 | -3.95e+05 | -6.31e+04 |
| Position_RAM | -1.139e+06 | 2.45e+05 | -4.643 | 0.000 | -1.62e+06 | -6.58e+05 |
| Position_RF | -1.714e+06 | 3.23e+05 | -5.298 | 0.000 | -2.35e+06 | -1.08e+06 |
| weight_group_over | -2.114e+05 | 6.35e+04 | -3.330 | 0.001 | -3.36e+05 | -8.7e+04 |
| Omnibus: | 6380.743 | Durbin-Watson: | 1.954 |
|---|---|---|---|
| Prob(Omnibus): | 0.000 | Jarque-Bera (JB): | 1620409.451 |
| Skew: | -0.621 | Prob(JB): | 0.00 |
| Kurtosis: | 51.324 | Cond. No. | 8.67e+08 |
from statsmodels.stats.outliers_influence import variance_inflation_factor
vif=pd.DataFrame()
vif["factor"]=[variance_inflation_factor(X_constant.values,i) for i in range (X_constant.shape[1])]
vif["features"]=X_constant.columns
vif
| factor | features | |
|---|---|---|
| 0 | 394.483982 | const |
| 1 | 4.685931 | Age |
| 2 | 10.120906 | Overall |
| 3 | 6.113049 | Potential |
| 4 | 5.101512 | Value |
| 5 | 4.617565 | Wage |
| 6 | 2.238477 | International Reputation |
| 7 | 3.866488 | Crossing |
| 8 | 7.423025 | Finishing |
| 9 | 11.422243 | ShortPassing |
| 10 | 5.607154 | Volleys |
| 11 | 6.988180 | LongPassing |
| 12 | 3.229543 | Stamina |
| 13 | 3.670714 | Vision |
| 14 | 6.499113 | Marking |
| 15 | 7.407469 | StandingTackle |
| 16 | 22.372930 | GKHandling |
| 17 | 1.028475 | Body_Type_C. Ronaldo |
| 18 | 1.014689 | Body_Type_Courtois |
| 19 | 1.144733 | Body_Type_Messi |
| 20 | 1.459145 | Real_Face_Yes |
| 21 | 1.107730 | Position_CDM |
| 22 | 23.897547 | Position_GK |
| 23 | 1.010669 | Position_LF |
| 24 | 1.041567 | Position_LS |
| 25 | 1.009110 | Position_RAM |
| 26 | 1.086007 | Position_RF |
| 27 | 1.057891 | weight_group_over |
#MODEL 1 - SIMPLE LINEAR REGRESSION
X_train, X_test, y_train,y_test = train_test_split(X,y, test_size=0.3,random_state=42)
lr = LinearRegression()
lr.fit(X_train,y_train)
y_pred_train = lr.predict(X_train)
y_pred_test = lr.predict(X_test)
from sklearn.metrics import r2_score,mean_squared_error
print('R-squared on train data :',r2_score(y_train, y_pred_train))
print('R-squared on test data :',r2_score(y_test, y_pred_test))
print('\nRMSE on train data :', np.sqrt(mean_squared_error(y_train, y_pred_train)))
print('RMSE on test data :', np.sqrt(mean_squared_error(y_test, y_pred_test)))
R-squared on train data : 0.9901742271819832 R-squared on test data : 0.9887766278808289 RMSE on train data : 1094986.677280849 RMSE on test data : 1195383.387125873
Coefficients = pd.DataFrame({'Features':X.columns, 'Coefficient':lr.coef_})
Coefficients
| Features | Coefficient | |
|---|---|---|
| 0 | Age | 1.597535e+04 |
| 1 | Overall | -4.338150e+04 |
| 2 | Potential | 3.624759e+04 |
| 3 | Value | 1.967426e+00 |
| 4 | Wage | 1.613862e+00 |
| 5 | International Reputation | -7.280892e+05 |
| 6 | Crossing | 2.973803e+03 |
| 7 | Finishing | 6.320073e+03 |
| 8 | ShortPassing | 4.343190e+03 |
| 9 | Volleys | -6.563284e+03 |
| 10 | LongPassing | -3.740367e+03 |
| 11 | Stamina | -7.180118e+03 |
| 12 | Vision | -2.883650e+03 |
| 13 | Marking | -4.289760e+03 |
| 14 | StandingTackle | 7.157875e+03 |
| 15 | GKHandling | 9.900991e+03 |
| 16 | Body_Type_C. Ronaldo | -2.190050e+07 |
| 17 | Body_Type_Courtois | 2.328306e-08 |
| 18 | Body_Type_Messi | -3.725290e-09 |
| 19 | Real_Face_Yes | 3.764197e+05 |
| 20 | Position_CDM | 9.993918e+04 |
| 21 | Position_GK | -4.706803e+05 |
| 22 | Position_LF | -1.702166e+06 |
| 23 | Position_LS | -2.189129e+05 |
| 24 | Position_RAM | -6.739480e+05 |
| 25 | Position_RF | -1.948272e+06 |
| 26 | weight_group_over | -2.492186e+05 |
Coefficients.sort_values(by="Coefficient",ascending=False)
| Features | Coefficient | |
|---|---|---|
| 19 | Real_Face_Yes | 3.764197e+05 |
| 20 | Position_CDM | 9.993918e+04 |
| 2 | Potential | 3.624759e+04 |
| 0 | Age | 1.597535e+04 |
| 15 | GKHandling | 9.900991e+03 |
| 14 | StandingTackle | 7.157875e+03 |
| 7 | Finishing | 6.320073e+03 |
| 8 | ShortPassing | 4.343190e+03 |
| 6 | Crossing | 2.973803e+03 |
| 3 | Value | 1.967426e+00 |
| 4 | Wage | 1.613862e+00 |
| 17 | Body_Type_Courtois | 2.328306e-08 |
| 18 | Body_Type_Messi | -3.725290e-09 |
| 12 | Vision | -2.883650e+03 |
| 10 | LongPassing | -3.740367e+03 |
| 13 | Marking | -4.289760e+03 |
| 9 | Volleys | -6.563284e+03 |
| 11 | Stamina | -7.180118e+03 |
| 1 | Overall | -4.338150e+04 |
| 23 | Position_LS | -2.189129e+05 |
| 26 | weight_group_over | -2.492186e+05 |
| 21 | Position_GK | -4.706803e+05 |
| 24 | Position_RAM | -6.739480e+05 |
| 5 | International Reputation | -7.280892e+05 |
| 22 | Position_LF | -1.702166e+06 |
| 25 | Position_RF | -1.948272e+06 |
| 16 | Body_Type_C. Ronaldo | -2.190050e+07 |
# MODEL 2 Random Forest
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
lr=RandomForestRegressor()
lr.fit(X_train,y_train)
y_pred_train = lr.predict(X_train)
y_pred_test = lr.predict(X_test)
from sklearn.metrics import r2_score,mean_squared_error
print('R-squared on train data :',r2_score(y_train, y_pred_train))
print('R-squared on test data :',r2_score(y_test, y_pred_test))
print('\nRMSE on train data :', np.sqrt(mean_squared_error(y_train, y_pred_train)))
print('RMSE on test data :', np.sqrt(mean_squared_error(y_test, y_pred_test)))
R-squared on train data : 0.9980137020500968 R-squared on test data : 0.9872857597977909 RMSE on train data : 492320.2200635701 RMSE on test data : 1272303.5550455693
mse=np.mean((y_pred_test-y_test)**2)
rmse=np.sqrt(mse)
print("root_mean_squared_error :", rmse)
mae=(abs(y_pred_test-y_test)).mean()
print("mean_absolute_error :", mae)
root_mean_squared_error : 1272303.5550455693 mean_absolute_error : 435422.741838574
ypred=pd.DataFrame(y_pred_test,columns=["predicted"])
ytest=pd.DataFrame(y_test)
ytest=ytest.reset_index(drop=True)
new=pd.concat([ytest,ypred],axis=1)
new
| Release Clause | predicted | |
|---|---|---|
| 0 | 8500000.0 | 9378000.0 |
| 1 | 11500000.0 | 9418000.0 |
| 2 | 2300000.0 | 2670000.0 |
| 3 | 1400000.0 | 1695000.0 |
| 4 | 998000.0 | 1014900.0 |
| ... | ... | ... |
| 4988 | 10200000.0 | 11188000.0 |
| 4989 | 522000.0 | 443090.0 |
| 4990 | 165000.0 | 148010.0 |
| 4991 | 979000.0 | 1155100.0 |
| 4992 | 20800000.0 | 20046000.0 |
4993 rows × 2 columns
# MODEL 3 Decision Tree
lr=DecisionTreeRegressor()
lr.fit(X_train,y_train)
y_pred_train = lr.predict(X_train)
y_pred_test = lr.predict(X_test)
from sklearn.metrics import r2_score,mean_squared_error
print('R-squared on train data :',r2_score(y_train, y_pred_train))
print('R-squared on test data :',r2_score(y_test, y_pred_test))
print('\nRMSE on train data :', np.sqrt(mean_squared_error(y_train, y_pred_train)))
print('RMSE on test data :', np.sqrt(mean_squared_error(y_test, y_pred_test)))
R-squared on train data : 1.0 R-squared on test data : 0.9827141181642447 RMSE on train data : 0.0 RMSE on test data : 1483512.8231380344
# MODEL 4 Cat Boost Regressor
from catboost import CatBoostRegressor
lr=CatBoostRegressor()
lr.fit(X_train,y_train)
y_pred_train = lr.predict(X_train)
y_pred_test = lr.predict(X_test)
Learning rate set to 0.060347 0: learn: 10488066.8373636 total: 56.3ms remaining: 56.2s 1: learn: 9978720.4772153 total: 58ms remaining: 28.9s 2: learn: 9475828.2125520 total: 59.6ms remaining: 19.8s 3: learn: 9006619.0458813 total: 61.3ms remaining: 15.3s 4: learn: 8581137.3867293 total: 62.9ms remaining: 12.5s 5: learn: 8152730.5626454 total: 64.9ms remaining: 10.7s 6: learn: 7750216.2788976 total: 66.5ms remaining: 9.44s 7: learn: 7392713.6418500 total: 68.3ms remaining: 8.47s 8: learn: 7026244.8229838 total: 69.9ms remaining: 7.7s 9: learn: 6668058.6023341 total: 71.5ms remaining: 7.08s 10: learn: 6359509.9011004 total: 73.4ms remaining: 6.6s 11: learn: 6057124.2304189 total: 74.9ms remaining: 6.16s 12: learn: 5764820.2012091 total: 76.4ms remaining: 5.8s 13: learn: 5484049.2831049 total: 78.2ms remaining: 5.51s 14: learn: 5223620.4277393 total: 80ms remaining: 5.25s 15: learn: 4998928.0121972 total: 81.7ms remaining: 5.02s 16: learn: 4764210.7966173 total: 84.3ms remaining: 4.87s 17: learn: 4543898.5534170 total: 86.2ms remaining: 4.7s 18: learn: 4345557.5571778 total: 88.1ms remaining: 4.55s 19: learn: 4152270.2882473 total: 89.8ms remaining: 4.4s 20: learn: 3968891.4657271 total: 91.4ms remaining: 4.26s 21: learn: 3792891.9482488 total: 93ms remaining: 4.13s 22: learn: 3637953.9010459 total: 94.6ms remaining: 4.02s 23: learn: 3477109.1161335 total: 96.4ms remaining: 3.92s 24: learn: 3331262.5859561 total: 98.3ms remaining: 3.83s 25: learn: 3191629.8093379 total: 99.8ms remaining: 3.74s 26: learn: 3060855.5012408 total: 101ms remaining: 3.65s 27: learn: 2935122.1840016 total: 103ms remaining: 3.58s 28: learn: 2822126.5824562 total: 105ms remaining: 3.51s 29: learn: 2708125.5533309 total: 107ms remaining: 3.45s 30: learn: 2608275.2074658 total: 109ms remaining: 3.39s 31: learn: 2507237.1945423 total: 110ms remaining: 3.34s 32: learn: 2410682.4096309 total: 112ms remaining: 3.28s 33: learn: 2325457.2983423 total: 114ms remaining: 3.23s 34: learn: 2244363.3958290 total: 115ms remaining: 3.18s 35: learn: 2169078.7196974 total: 117ms remaining: 3.13s 36: learn: 2102121.9058825 total: 118ms remaining: 3.08s 37: learn: 2036962.9259072 total: 120ms remaining: 3.04s 38: learn: 1972632.8332462 total: 122ms remaining: 3s 39: learn: 1911705.0395858 total: 123ms remaining: 2.96s 40: learn: 1856459.4555476 total: 125ms remaining: 2.92s 41: learn: 1806186.8301644 total: 127ms remaining: 2.89s 42: learn: 1761688.2568049 total: 129ms remaining: 2.87s 43: learn: 1714969.1303185 total: 131ms remaining: 2.83s 44: learn: 1672580.4547689 total: 132ms remaining: 2.8s 45: learn: 1638343.8744790 total: 134ms remaining: 2.78s 46: learn: 1601640.8911727 total: 136ms remaining: 2.75s 47: learn: 1565143.4906179 total: 137ms remaining: 2.72s 48: learn: 1534972.1737857 total: 138ms remaining: 2.69s 49: learn: 1509964.2397978 total: 140ms remaining: 2.66s 50: learn: 1483026.6680046 total: 142ms remaining: 2.65s 51: learn: 1456357.2112701 total: 144ms remaining: 2.63s 52: learn: 1431887.1691493 total: 146ms remaining: 2.6s 53: learn: 1408054.9670573 total: 147ms remaining: 2.58s 54: learn: 1387098.0310771 total: 149ms remaining: 2.56s 55: learn: 1366188.0528512 total: 151ms remaining: 2.54s 56: learn: 1346688.2337409 total: 152ms remaining: 2.52s 57: learn: 1326565.2122960 total: 154ms remaining: 2.49s 58: learn: 1309972.2400762 total: 155ms remaining: 2.48s 59: learn: 1291944.8514106 total: 157ms remaining: 2.47s 60: learn: 1274931.2372619 total: 160ms remaining: 2.46s 61: learn: 1270194.8526997 total: 161ms remaining: 2.44s 62: learn: 1266259.2551704 total: 163ms remaining: 2.42s 63: learn: 1261717.8290494 total: 164ms remaining: 2.4s 64: learn: 1247085.2356588 total: 166ms remaining: 2.39s 65: learn: 1234210.2437114 total: 168ms remaining: 2.37s 66: learn: 1230191.6340519 total: 169ms remaining: 2.35s 67: learn: 1226183.8534503 total: 171ms remaining: 2.34s 68: learn: 1221977.3362119 total: 173ms remaining: 2.34s 69: learn: 1217131.2288343 total: 175ms remaining: 2.33s 70: learn: 1213183.7676285 total: 177ms remaining: 2.31s 71: learn: 1209450.2144408 total: 179ms remaining: 2.3s 72: learn: 1205485.5950732 total: 180ms remaining: 2.29s 73: learn: 1201978.0222340 total: 182ms remaining: 2.28s 74: learn: 1198470.4482986 total: 184ms remaining: 2.27s 75: learn: 1187501.5902192 total: 186ms remaining: 2.27s 76: learn: 1174100.8479893 total: 189ms remaining: 2.27s 77: learn: 1162921.7827509 total: 191ms remaining: 2.26s 78: learn: 1160199.3201856 total: 192ms remaining: 2.24s 79: learn: 1148932.9256550 total: 194ms remaining: 2.23s 80: learn: 1138314.1039632 total: 196ms remaining: 2.22s 81: learn: 1135762.4632385 total: 197ms remaining: 2.21s 82: learn: 1133112.3686824 total: 199ms remaining: 2.2s 83: learn: 1130483.6789980 total: 201ms remaining: 2.19s 84: learn: 1127892.6351876 total: 203ms remaining: 2.18s 85: learn: 1116485.4290347 total: 205ms remaining: 2.18s 86: learn: 1112694.2445562 total: 207ms remaining: 2.17s 87: learn: 1110465.4441744 total: 209ms remaining: 2.17s 88: learn: 1108252.4561619 total: 211ms remaining: 2.16s 89: learn: 1105991.7931128 total: 213ms remaining: 2.15s 90: learn: 1097814.8195893 total: 215ms remaining: 2.15s 91: learn: 1095697.1289994 total: 217ms remaining: 2.14s 92: learn: 1087921.5030328 total: 219ms remaining: 2.14s 93: learn: 1085521.3937241 total: 222ms remaining: 2.13s 94: learn: 1084005.5524110 total: 223ms remaining: 2.12s 95: learn: 1081594.0327328 total: 224ms remaining: 2.11s 96: learn: 1079561.2014488 total: 226ms remaining: 2.1s 97: learn: 1071024.7127667 total: 228ms remaining: 2.1s 98: learn: 1064008.6297896 total: 230ms remaining: 2.09s 99: learn: 1062387.3134100 total: 231ms remaining: 2.08s 100: learn: 1060687.6087546 total: 233ms remaining: 2.07s 101: learn: 1059145.0973547 total: 234ms remaining: 2.06s 102: learn: 1057561.3309260 total: 236ms remaining: 2.05s 103: learn: 1049578.2104045 total: 238ms remaining: 2.05s 104: learn: 1042333.1719472 total: 239ms remaining: 2.04s 105: learn: 1037850.0686794 total: 241ms remaining: 2.03s 106: learn: 1036549.3904410 total: 243ms remaining: 2.03s 107: learn: 1030920.6711125 total: 245ms remaining: 2.02s 108: learn: 1027681.4499880 total: 247ms remaining: 2.02s 109: learn: 1025992.0501955 total: 248ms remaining: 2s 110: learn: 1024951.0092300 total: 249ms remaining: 1.99s 111: learn: 1022751.7845169 total: 250ms remaining: 1.98s 112: learn: 1018477.6640224 total: 252ms remaining: 1.98s 113: learn: 1013573.1629516 total: 253ms remaining: 1.97s 114: learn: 1011738.7673801 total: 255ms remaining: 1.96s 115: learn: 1010617.1075656 total: 256ms remaining: 1.95s 116: learn: 1009573.6037198 total: 258ms remaining: 1.94s 117: learn: 1003276.7432871 total: 259ms remaining: 1.94s 118: learn: 998475.1374315 total: 261ms remaining: 1.93s 119: learn: 994877.6389612 total: 262ms remaining: 1.92s 120: learn: 993788.4563318 total: 264ms remaining: 1.92s 121: learn: 992859.7691891 total: 265ms remaining: 1.91s 122: learn: 987984.0890655 total: 267ms remaining: 1.9s 123: learn: 981921.9025847 total: 268ms remaining: 1.9s 124: learn: 981001.5716291 total: 270ms remaining: 1.89s 125: learn: 980188.0586713 total: 271ms remaining: 1.88s 126: learn: 978359.2547060 total: 273ms remaining: 1.88s 127: learn: 974495.9481162 total: 274ms remaining: 1.87s 128: learn: 973733.0651158 total: 276ms remaining: 1.86s 129: learn: 972954.1434518 total: 277ms remaining: 1.86s 130: learn: 972281.7270741 total: 279ms remaining: 1.85s 131: learn: 971554.9445037 total: 280ms remaining: 1.84s 132: learn: 970875.2346565 total: 282ms remaining: 1.84s 133: learn: 967427.1564627 total: 283ms remaining: 1.83s 134: learn: 966812.0076300 total: 285ms remaining: 1.82s 135: learn: 966195.9054674 total: 286ms remaining: 1.82s 136: learn: 964405.5117907 total: 288ms remaining: 1.81s 137: learn: 963922.5563305 total: 289ms remaining: 1.8s 138: learn: 962353.3900476 total: 291ms remaining: 1.8s 139: learn: 961713.9586683 total: 292ms remaining: 1.79s 140: learn: 957906.3960967 total: 294ms remaining: 1.79s 141: learn: 954655.2278164 total: 295ms remaining: 1.78s 142: learn: 954210.3361293 total: 296ms remaining: 1.78s 143: learn: 953674.9556644 total: 298ms remaining: 1.77s 144: learn: 949613.4376966 total: 300ms remaining: 1.77s 145: learn: 946538.2473615 total: 301ms remaining: 1.76s 146: learn: 946032.5223368 total: 302ms remaining: 1.75s 147: learn: 940962.3994477 total: 304ms remaining: 1.75s 148: learn: 940446.4517501 total: 306ms remaining: 1.75s 149: learn: 936551.7691059 total: 307ms remaining: 1.74s 150: learn: 936082.7530735 total: 308ms remaining: 1.73s 151: learn: 933543.1178173 total: 310ms remaining: 1.73s 152: learn: 930553.7654796 total: 311ms remaining: 1.72s 153: learn: 928941.8277460 total: 313ms remaining: 1.72s 154: learn: 928433.3244874 total: 314ms remaining: 1.71s 155: learn: 925293.8313873 total: 316ms remaining: 1.71s 156: learn: 924331.0043088 total: 317ms remaining: 1.7s 157: learn: 923919.7073301 total: 319ms remaining: 1.7s 158: learn: 923542.5504736 total: 320ms remaining: 1.69s 159: learn: 920614.2734493 total: 322ms remaining: 1.69s 160: learn: 917745.0070823 total: 323ms remaining: 1.68s 161: learn: 915515.5108791 total: 325ms remaining: 1.68s 162: learn: 913221.9051102 total: 326ms remaining: 1.68s 163: learn: 912864.0757678 total: 328ms remaining: 1.67s 164: learn: 910833.3268341 total: 330ms remaining: 1.67s 165: learn: 910499.9842785 total: 331ms remaining: 1.66s 166: learn: 906820.9550901 total: 332ms remaining: 1.66s 167: learn: 906259.0702018 total: 334ms remaining: 1.65s 168: learn: 905977.8427708 total: 335ms remaining: 1.65s 169: learn: 905685.5229176 total: 336ms remaining: 1.64s 170: learn: 902300.6021359 total: 338ms remaining: 1.64s 171: learn: 899668.6074936 total: 339ms remaining: 1.63s 172: learn: 899424.0782751 total: 340ms remaining: 1.63s 173: learn: 897828.9805040 total: 342ms remaining: 1.62s 174: learn: 897510.1842094 total: 343ms remaining: 1.62s 175: learn: 895546.2040095 total: 345ms remaining: 1.61s 176: learn: 893535.4161636 total: 346ms remaining: 1.61s 177: learn: 890400.9243640 total: 348ms remaining: 1.61s 178: learn: 887474.4420458 total: 349ms remaining: 1.6s 179: learn: 885524.1201270 total: 351ms remaining: 1.6s 180: learn: 883964.4776172 total: 353ms remaining: 1.59s 181: learn: 881248.6935826 total: 354ms remaining: 1.59s 182: learn: 878038.0465685 total: 355ms remaining: 1.59s 183: learn: 877823.7732790 total: 357ms remaining: 1.58s 184: learn: 876406.9689027 total: 358ms remaining: 1.58s 185: learn: 874121.9458818 total: 360ms remaining: 1.57s 186: learn: 872745.6280932 total: 361ms remaining: 1.57s 187: learn: 870876.0955850 total: 362ms remaining: 1.56s 188: learn: 868634.9034695 total: 364ms remaining: 1.56s 189: learn: 866696.3087022 total: 365ms remaining: 1.56s 190: learn: 864607.1551154 total: 367ms remaining: 1.55s 191: learn: 860810.0287411 total: 368ms remaining: 1.55s 192: learn: 858140.3462344 total: 370ms remaining: 1.55s 193: learn: 856755.7107047 total: 371ms remaining: 1.54s 194: learn: 854915.0599762 total: 373ms remaining: 1.54s 195: learn: 853535.4439953 total: 375ms remaining: 1.54s 196: learn: 853370.0818826 total: 377ms remaining: 1.53s 197: learn: 851963.0294312 total: 378ms remaining: 1.53s 198: learn: 850534.7280617 total: 380ms remaining: 1.53s 199: learn: 849097.1990629 total: 381ms remaining: 1.52s 200: learn: 847245.0180589 total: 383ms remaining: 1.52s 201: learn: 847096.8817983 total: 384ms remaining: 1.52s 202: learn: 846953.1574858 total: 386ms remaining: 1.51s 203: learn: 845311.1013253 total: 388ms remaining: 1.51s 204: learn: 845173.0697492 total: 389ms remaining: 1.51s 205: learn: 843505.2169516 total: 391ms remaining: 1.51s 206: learn: 841831.4805193 total: 393ms remaining: 1.5s 207: learn: 841740.5816122 total: 394ms remaining: 1.5s 208: learn: 841652.3956034 total: 396ms remaining: 1.5s 209: learn: 841566.8417767 total: 397ms remaining: 1.49s 210: learn: 840890.4131182 total: 399ms remaining: 1.49s 211: learn: 838860.2908791 total: 401ms remaining: 1.49s 212: learn: 836655.5617735 total: 402ms remaining: 1.49s 213: learn: 833715.3961835 total: 404ms remaining: 1.48s 214: learn: 832964.6938958 total: 406ms remaining: 1.48s 215: learn: 832257.0628705 total: 408ms remaining: 1.48s 216: learn: 831192.5761187 total: 409ms remaining: 1.48s 217: learn: 828257.2296869 total: 411ms remaining: 1.47s 218: learn: 825634.1910889 total: 413ms remaining: 1.47s 219: learn: 823539.6813113 total: 415ms remaining: 1.47s 220: learn: 822053.5985669 total: 417ms remaining: 1.47s 221: learn: 820583.9067397 total: 419ms remaining: 1.47s 222: learn: 818623.2469314 total: 420ms remaining: 1.46s 223: learn: 817256.4592628 total: 422ms remaining: 1.46s 224: learn: 816017.0531702 total: 423ms remaining: 1.46s 225: learn: 814944.7189343 total: 425ms remaining: 1.46s 226: learn: 813265.6692037 total: 426ms remaining: 1.45s 227: learn: 810343.2562920 total: 428ms remaining: 1.45s 228: learn: 810235.6926708 total: 429ms remaining: 1.45s 229: learn: 809058.0108242 total: 431ms remaining: 1.44s 230: learn: 808108.5963444 total: 432ms remaining: 1.44s 231: learn: 807100.2912978 total: 434ms remaining: 1.44s 232: learn: 804890.5620639 total: 436ms remaining: 1.44s 233: learn: 803622.1079490 total: 438ms remaining: 1.43s 234: learn: 802434.9896319 total: 439ms remaining: 1.43s 235: learn: 800902.5921477 total: 440ms remaining: 1.43s 236: learn: 799820.0305617 total: 442ms remaining: 1.42s 237: learn: 797026.1772868 total: 444ms remaining: 1.42s 238: learn: 796036.3244322 total: 445ms remaining: 1.42s 239: learn: 794243.9749176 total: 447ms remaining: 1.42s 240: learn: 793855.5770629 total: 448ms remaining: 1.41s 241: learn: 792869.3305266 total: 450ms remaining: 1.41s 242: learn: 790455.1811174 total: 451ms remaining: 1.41s 243: learn: 790368.1362825 total: 453ms remaining: 1.4s 244: learn: 789496.1903799 total: 454ms remaining: 1.4s 245: learn: 788023.2943330 total: 456ms remaining: 1.4s 246: learn: 785763.3152841 total: 457ms remaining: 1.39s 247: learn: 784210.7670386 total: 459ms remaining: 1.39s 248: learn: 783290.6913837 total: 460ms remaining: 1.39s 249: learn: 782271.8807260 total: 462ms remaining: 1.39s 250: learn: 781425.5907172 total: 463ms remaining: 1.38s 251: learn: 780405.5156738 total: 465ms remaining: 1.38s 252: learn: 778882.2143293 total: 466ms remaining: 1.38s 253: learn: 778096.5298110 total: 468ms remaining: 1.37s 254: learn: 776381.2153458 total: 470ms remaining: 1.37s 255: learn: 774532.2708535 total: 471ms remaining: 1.37s 256: learn: 773422.9942692 total: 473ms remaining: 1.37s 257: learn: 771002.4366640 total: 475ms remaining: 1.36s 258: learn: 770921.9933449 total: 476ms remaining: 1.36s 259: learn: 770843.9525168 total: 478ms remaining: 1.36s 260: learn: 769683.5065065 total: 479ms remaining: 1.36s 261: learn: 768875.0199686 total: 480ms remaining: 1.35s 262: learn: 767854.8712340 total: 482ms remaining: 1.35s 263: learn: 765773.4366239 total: 484ms remaining: 1.35s 264: learn: 764253.8675083 total: 485ms remaining: 1.35s 265: learn: 764070.8319981 total: 487ms remaining: 1.34s 266: learn: 763849.8744691 total: 488ms remaining: 1.34s 267: learn: 761977.2918577 total: 489ms remaining: 1.34s 268: learn: 760208.4650809 total: 491ms remaining: 1.33s 269: learn: 757716.3719381 total: 493ms remaining: 1.33s 270: learn: 757576.6908656 total: 495ms remaining: 1.33s 271: learn: 756251.3978527 total: 496ms remaining: 1.33s 272: learn: 755050.5269913 total: 497ms remaining: 1.32s 273: learn: 753274.4787333 total: 499ms remaining: 1.32s 274: learn: 753141.5263309 total: 501ms remaining: 1.32s 275: learn: 752990.4333782 total: 502ms remaining: 1.32s 276: learn: 751899.2592866 total: 503ms remaining: 1.31s 277: learn: 751113.4881176 total: 505ms remaining: 1.31s 278: learn: 749700.1539818 total: 507ms remaining: 1.31s 279: learn: 748374.9567173 total: 508ms remaining: 1.31s 280: learn: 748208.7700311 total: 509ms remaining: 1.3s 281: learn: 747332.6054899 total: 511ms remaining: 1.3s 282: learn: 746658.5515224 total: 513ms remaining: 1.3s 283: learn: 745181.0758963 total: 514ms remaining: 1.3s 284: learn: 745057.7723777 total: 516ms remaining: 1.29s 285: learn: 743066.8444374 total: 517ms remaining: 1.29s 286: learn: 741839.8367926 total: 519ms remaining: 1.29s 287: learn: 740862.6954235 total: 520ms remaining: 1.29s 288: learn: 739988.9198232 total: 522ms remaining: 1.28s 289: learn: 739745.6640044 total: 523ms remaining: 1.28s 290: learn: 739604.7440196 total: 525ms remaining: 1.28s 291: learn: 738419.5494636 total: 526ms remaining: 1.28s 292: learn: 736835.6917411 total: 528ms remaining: 1.27s 293: learn: 735994.1301371 total: 530ms remaining: 1.27s 294: learn: 735172.2402344 total: 531ms remaining: 1.27s 295: learn: 733991.5735469 total: 532ms remaining: 1.26s 296: learn: 733212.9586082 total: 534ms remaining: 1.26s 297: learn: 731476.3869341 total: 536ms remaining: 1.26s 298: learn: 730388.0227004 total: 537ms remaining: 1.26s 299: learn: 729609.4427496 total: 538ms remaining: 1.26s 300: learn: 728587.6435966 total: 540ms remaining: 1.25s 301: learn: 727612.9195846 total: 541ms remaining: 1.25s 302: learn: 726646.0653586 total: 543ms remaining: 1.25s 303: learn: 725290.1426977 total: 544ms remaining: 1.25s 304: learn: 724348.0238846 total: 546ms remaining: 1.24s 305: learn: 722127.2472376 total: 547ms remaining: 1.24s 306: learn: 720851.2055824 total: 549ms remaining: 1.24s 307: learn: 718189.4337026 total: 551ms remaining: 1.24s 308: learn: 716775.5070749 total: 553ms remaining: 1.24s 309: learn: 716532.8838680 total: 554ms remaining: 1.23s 310: learn: 716068.8266591 total: 556ms remaining: 1.23s 311: learn: 715056.1286842 total: 557ms remaining: 1.23s 312: learn: 714106.0904486 total: 559ms remaining: 1.23s 313: learn: 713163.3267741 total: 561ms remaining: 1.23s 314: learn: 713037.7813930 total: 563ms remaining: 1.22s 315: learn: 712195.0962113 total: 564ms remaining: 1.22s 316: learn: 711358.4556476 total: 566ms remaining: 1.22s 317: learn: 711070.0370710 total: 568ms remaining: 1.22s 318: learn: 709577.8497103 total: 569ms remaining: 1.22s 319: learn: 709520.6047434 total: 571ms remaining: 1.21s 320: learn: 709460.2003037 total: 573ms remaining: 1.21s 321: learn: 708361.3220429 total: 574ms remaining: 1.21s 322: learn: 706669.5693013 total: 576ms remaining: 1.21s 323: learn: 705300.5051109 total: 578ms remaining: 1.21s 324: learn: 704400.1072725 total: 580ms remaining: 1.2s 325: learn: 703666.2955760 total: 581ms remaining: 1.2s 326: learn: 702782.4181733 total: 583ms remaining: 1.2s 327: learn: 702176.4513298 total: 585ms remaining: 1.2s 328: learn: 700760.6602599 total: 587ms remaining: 1.2s 329: learn: 699867.1448628 total: 589ms remaining: 1.2s 330: learn: 698670.0195536 total: 590ms remaining: 1.19s 331: learn: 698183.4105485 total: 592ms remaining: 1.19s 332: learn: 698070.8874922 total: 593ms remaining: 1.19s 333: learn: 697964.4317580 total: 595ms remaining: 1.19s 334: learn: 697236.2953530 total: 596ms remaining: 1.18s 335: learn: 696912.8649726 total: 598ms remaining: 1.18s 336: learn: 695279.8474575 total: 599ms remaining: 1.18s 337: learn: 694387.5094787 total: 601ms remaining: 1.18s 338: learn: 693142.8133096 total: 603ms remaining: 1.18s 339: learn: 692901.8608850 total: 604ms remaining: 1.17s 340: learn: 692244.1272074 total: 606ms remaining: 1.17s 341: learn: 691529.4058436 total: 607ms remaining: 1.17s 342: learn: 690867.2961642 total: 609ms remaining: 1.17s 343: learn: 690767.4253575 total: 610ms remaining: 1.16s 344: learn: 690614.5303320 total: 611ms remaining: 1.16s 345: learn: 689746.0550096 total: 613ms remaining: 1.16s 346: learn: 689327.1632987 total: 614ms remaining: 1.16s 347: learn: 688677.6447696 total: 616ms remaining: 1.15s 348: learn: 687404.1628767 total: 618ms remaining: 1.15s 349: learn: 685320.6038258 total: 619ms remaining: 1.15s 350: learn: 685294.0771086 total: 621ms remaining: 1.15s 351: learn: 684410.6704641 total: 622ms remaining: 1.15s 352: learn: 683444.4333699 total: 624ms remaining: 1.14s 353: learn: 682340.4657028 total: 625ms remaining: 1.14s 354: learn: 682313.0193303 total: 627ms remaining: 1.14s 355: learn: 681926.6683857 total: 628ms remaining: 1.14s 356: learn: 681169.4458009 total: 630ms remaining: 1.13s 357: learn: 680546.0599563 total: 632ms remaining: 1.13s 358: learn: 679088.3288399 total: 633ms remaining: 1.13s 359: learn: 678070.1429266 total: 635ms remaining: 1.13s 360: learn: 677263.7919819 total: 636ms remaining: 1.13s 361: learn: 676288.7928200 total: 638ms remaining: 1.12s 362: learn: 676265.4888616 total: 639ms remaining: 1.12s 363: learn: 675167.0763026 total: 641ms remaining: 1.12s 364: learn: 674182.0713928 total: 642ms remaining: 1.12s 365: learn: 673439.2801258 total: 643ms remaining: 1.11s 366: learn: 673087.2720282 total: 645ms remaining: 1.11s 367: learn: 672155.5321127 total: 647ms remaining: 1.11s 368: learn: 671590.1939737 total: 648ms remaining: 1.11s 369: learn: 671400.5445853 total: 650ms remaining: 1.1s 370: learn: 670498.5266321 total: 651ms remaining: 1.1s 371: learn: 669926.4371236 total: 652ms remaining: 1.1s 372: learn: 669341.2750658 total: 654ms remaining: 1.1s 373: learn: 668141.2554091 total: 656ms remaining: 1.1s 374: learn: 667306.4462862 total: 657ms remaining: 1.09s 375: learn: 666327.1743728 total: 659ms remaining: 1.09s 376: learn: 665409.7534516 total: 660ms remaining: 1.09s 377: learn: 664698.0811589 total: 662ms remaining: 1.09s 378: learn: 663370.8988330 total: 663ms remaining: 1.09s 379: learn: 663320.7823579 total: 665ms remaining: 1.08s 380: learn: 662494.3309452 total: 666ms remaining: 1.08s 381: learn: 661235.9287100 total: 668ms remaining: 1.08s 382: learn: 660646.0597625 total: 669ms remaining: 1.08s 383: learn: 659724.5499987 total: 671ms remaining: 1.08s 384: learn: 659145.4742348 total: 673ms remaining: 1.07s 385: learn: 658157.5508600 total: 674ms remaining: 1.07s 386: learn: 657217.6041680 total: 676ms remaining: 1.07s 387: learn: 656567.0361078 total: 677ms remaining: 1.07s 388: learn: 656255.2753805 total: 679ms remaining: 1.06s 389: learn: 655630.6636193 total: 680ms remaining: 1.06s 390: learn: 654712.0991805 total: 682ms remaining: 1.06s 391: learn: 653422.7435489 total: 683ms remaining: 1.06s 392: learn: 652438.0423021 total: 685ms remaining: 1.06s 393: learn: 651671.8902948 total: 687ms remaining: 1.06s 394: learn: 650879.8587905 total: 688ms remaining: 1.05s 395: learn: 650357.7129403 total: 690ms remaining: 1.05s 396: learn: 649284.8208882 total: 691ms remaining: 1.05s 397: learn: 648783.2456585 total: 693ms remaining: 1.05s 398: learn: 648301.8625730 total: 694ms remaining: 1.04s 399: learn: 648024.3753675 total: 696ms remaining: 1.04s 400: learn: 647085.9216878 total: 697ms remaining: 1.04s 401: learn: 646496.4963642 total: 699ms remaining: 1.04s 402: learn: 645036.1002213 total: 700ms remaining: 1.04s 403: learn: 644600.5395867 total: 701ms remaining: 1.03s 404: learn: 643580.5611663 total: 703ms remaining: 1.03s 405: learn: 643115.3954245 total: 705ms remaining: 1.03s 406: learn: 642326.1790587 total: 707ms remaining: 1.03s 407: learn: 641597.2918324 total: 708ms remaining: 1.03s 408: learn: 641304.3753706 total: 709ms remaining: 1.02s 409: learn: 640640.8686866 total: 711ms remaining: 1.02s 410: learn: 640498.8023811 total: 712ms remaining: 1.02s 411: learn: 640067.9164610 total: 714ms remaining: 1.02s 412: learn: 639325.5626780 total: 716ms remaining: 1.02s 413: learn: 638385.2515412 total: 718ms remaining: 1.02s 414: learn: 637536.5154763 total: 720ms remaining: 1.01s 415: learn: 636944.9579684 total: 721ms remaining: 1.01s 416: learn: 635394.4643819 total: 723ms remaining: 1.01s 417: learn: 635068.1055798 total: 724ms remaining: 1.01s 418: learn: 634431.1956823 total: 725ms remaining: 1s 419: learn: 632793.8263320 total: 727ms remaining: 1s 420: learn: 631847.2893081 total: 729ms remaining: 1s 421: learn: 630935.9583036 total: 730ms remaining: 1s 422: learn: 630167.5438267 total: 732ms remaining: 998ms 423: learn: 629930.2225694 total: 733ms remaining: 996ms 424: learn: 629418.8088116 total: 736ms remaining: 996ms 425: learn: 628996.9725949 total: 738ms remaining: 994ms 426: learn: 628521.5476042 total: 739ms remaining: 992ms 427: learn: 627700.8724877 total: 741ms remaining: 990ms 428: learn: 627471.4435165 total: 742ms remaining: 988ms 429: learn: 627256.2842711 total: 744ms remaining: 986ms 430: learn: 626308.6353487 total: 745ms remaining: 984ms 431: learn: 625773.1156814 total: 747ms remaining: 983ms 432: learn: 624626.6303895 total: 749ms remaining: 981ms 433: learn: 624312.7046084 total: 751ms remaining: 979ms 434: learn: 623569.3175845 total: 753ms remaining: 978ms 435: learn: 622702.1606473 total: 755ms remaining: 976ms 436: learn: 622025.7805790 total: 756ms remaining: 974ms 437: learn: 620966.2531945 total: 758ms remaining: 972ms 438: learn: 620511.5227595 total: 759ms remaining: 970ms 439: learn: 620124.0270699 total: 761ms remaining: 968ms 440: learn: 619510.2676979 total: 762ms remaining: 966ms 441: learn: 618822.2365682 total: 764ms remaining: 964ms 442: learn: 618095.3479991 total: 766ms remaining: 963ms 443: learn: 617781.4109950 total: 768ms remaining: 962ms 444: learn: 617167.6567402 total: 770ms remaining: 960ms 445: learn: 616220.3379265 total: 772ms remaining: 959ms 446: learn: 616019.8949231 total: 774ms remaining: 957ms 447: learn: 615705.8206404 total: 775ms remaining: 955ms 448: learn: 614879.2992671 total: 778ms remaining: 954ms 449: learn: 613858.8582491 total: 780ms remaining: 953ms 450: learn: 612810.6964958 total: 782ms remaining: 952ms 451: learn: 612301.3598053 total: 784ms remaining: 950ms 452: learn: 611660.9974321 total: 785ms remaining: 948ms 453: learn: 611257.9566176 total: 787ms remaining: 947ms 454: learn: 610598.1248232 total: 789ms remaining: 946ms 455: learn: 610018.0007302 total: 792ms remaining: 945ms 456: learn: 609288.4315090 total: 794ms remaining: 943ms 457: learn: 608294.8959516 total: 795ms remaining: 941ms 458: learn: 607356.7457752 total: 797ms remaining: 939ms 459: learn: 606690.0676031 total: 799ms remaining: 937ms 460: learn: 605959.3118635 total: 800ms remaining: 935ms 461: learn: 605475.6588005 total: 802ms remaining: 934ms 462: learn: 605292.9717546 total: 804ms remaining: 932ms 463: learn: 604993.4699992 total: 805ms remaining: 930ms 464: learn: 604514.0323252 total: 807ms remaining: 929ms 465: learn: 603783.4031850 total: 810ms remaining: 928ms 466: learn: 603645.7110401 total: 812ms remaining: 927ms 467: learn: 602437.7439526 total: 814ms remaining: 925ms 468: learn: 601729.3883485 total: 816ms remaining: 924ms 469: learn: 601115.6568806 total: 818ms remaining: 923ms 470: learn: 600405.5130029 total: 820ms remaining: 921ms 471: learn: 599785.5564759 total: 822ms remaining: 919ms 472: learn: 599178.8827337 total: 825ms remaining: 919ms 473: learn: 598720.6732962 total: 827ms remaining: 918ms 474: learn: 598076.5710494 total: 829ms remaining: 916ms 475: learn: 597318.2010417 total: 832ms remaining: 916ms 476: learn: 597127.7320599 total: 833ms remaining: 914ms 477: learn: 596354.7671217 total: 835ms remaining: 912ms 478: learn: 595899.8365113 total: 837ms remaining: 911ms 479: learn: 595008.4460077 total: 841ms remaining: 911ms 480: learn: 593963.8889618 total: 843ms remaining: 909ms 481: learn: 593358.5044098 total: 845ms remaining: 908ms 482: learn: 592790.4023801 total: 847ms remaining: 906ms 483: learn: 592324.4061859 total: 849ms remaining: 905ms 484: learn: 591893.1347618 total: 851ms remaining: 903ms 485: learn: 591455.5810347 total: 853ms remaining: 902ms 486: learn: 590914.4234015 total: 856ms remaining: 901ms 487: learn: 590405.1407561 total: 858ms remaining: 900ms 488: learn: 589645.5540253 total: 859ms remaining: 898ms 489: learn: 589117.0527764 total: 862ms remaining: 897ms 490: learn: 588637.8276287 total: 864ms remaining: 896ms 491: learn: 588220.0379095 total: 866ms remaining: 894ms 492: learn: 587584.6875805 total: 869ms remaining: 893ms 493: learn: 587020.9876838 total: 870ms remaining: 892ms 494: learn: 586532.9756865 total: 872ms remaining: 890ms 495: learn: 585884.3527455 total: 874ms remaining: 888ms 496: learn: 585058.7358166 total: 876ms remaining: 887ms 497: learn: 584473.3399826 total: 878ms remaining: 885ms 498: learn: 583624.4589300 total: 880ms remaining: 883ms 499: learn: 583510.1879965 total: 881ms remaining: 881ms 500: learn: 582961.8252597 total: 884ms remaining: 880ms 501: learn: 582512.0630859 total: 886ms remaining: 879ms 502: learn: 582125.4715476 total: 888ms remaining: 877ms 503: learn: 581609.1110092 total: 890ms remaining: 876ms 504: learn: 581126.5754837 total: 892ms remaining: 875ms 505: learn: 580010.0357918 total: 894ms remaining: 873ms 506: learn: 579309.4307324 total: 896ms remaining: 871ms 507: learn: 578776.3500922 total: 898ms remaining: 870ms 508: learn: 578206.3961010 total: 900ms remaining: 868ms 509: learn: 577687.6773190 total: 901ms remaining: 866ms 510: learn: 577252.6101028 total: 903ms remaining: 864ms 511: learn: 576885.8775572 total: 906ms remaining: 863ms 512: learn: 576602.1855806 total: 908ms remaining: 862ms 513: learn: 575980.0741233 total: 909ms remaining: 860ms 514: learn: 575491.9387634 total: 911ms remaining: 858ms 515: learn: 574912.6492368 total: 913ms remaining: 857ms 516: learn: 574343.6210721 total: 915ms remaining: 855ms 517: learn: 573886.7545957 total: 917ms remaining: 853ms 518: learn: 573443.0495742 total: 919ms remaining: 851ms 519: learn: 572937.8686766 total: 921ms remaining: 850ms 520: learn: 572051.4682763 total: 923ms remaining: 849ms 521: learn: 571601.2381939 total: 925ms remaining: 847ms 522: learn: 570986.9522176 total: 927ms remaining: 846ms 523: learn: 570969.1933478 total: 929ms remaining: 844ms 524: learn: 570533.3062825 total: 931ms remaining: 842ms 525: learn: 570041.9500792 total: 933ms remaining: 840ms 526: learn: 569939.5016528 total: 935ms remaining: 839ms 527: learn: 569414.9394210 total: 938ms remaining: 838ms 528: learn: 568611.7811477 total: 939ms remaining: 836ms 529: learn: 567634.6811613 total: 941ms remaining: 835ms 530: learn: 567393.0363493 total: 943ms remaining: 833ms 531: learn: 567015.1874384 total: 945ms remaining: 831ms 532: learn: 566646.1797393 total: 947ms remaining: 829ms 533: learn: 566228.0435775 total: 949ms remaining: 828ms 534: learn: 565846.6808641 total: 951ms remaining: 826ms 535: learn: 565391.9068723 total: 952ms remaining: 824ms 536: learn: 564605.5571957 total: 954ms remaining: 823ms 537: learn: 564588.3144222 total: 956ms remaining: 821ms 538: learn: 563790.4361206 total: 958ms remaining: 819ms 539: learn: 563401.9173710 total: 960ms remaining: 817ms 540: learn: 562927.8865762 total: 961ms remaining: 815ms 541: learn: 562487.3113698 total: 963ms remaining: 813ms 542: learn: 562074.8846692 total: 965ms remaining: 812ms 543: learn: 561506.7800676 total: 967ms remaining: 810ms 544: learn: 561224.6698523 total: 969ms remaining: 809ms 545: learn: 561209.4168301 total: 970ms remaining: 806ms 546: learn: 560402.1053331 total: 972ms remaining: 805ms 547: learn: 559817.5733872 total: 974ms remaining: 803ms 548: learn: 559311.7677690 total: 975ms remaining: 801ms 549: learn: 558605.0660181 total: 977ms remaining: 799ms 550: learn: 558590.7812847 total: 979ms remaining: 797ms 551: learn: 558134.5252388 total: 981ms remaining: 796ms 552: learn: 558120.7704956 total: 982ms remaining: 794ms 553: learn: 557381.5599675 total: 983ms remaining: 792ms 554: learn: 556863.3614996 total: 985ms remaining: 790ms 555: learn: 556850.0061048 total: 987ms remaining: 788ms 556: learn: 555843.4605710 total: 989ms remaining: 786ms 557: learn: 555830.3210076 total: 990ms remaining: 784ms 558: learn: 555337.9438497 total: 991ms remaining: 782ms 559: learn: 554650.1186817 total: 993ms remaining: 781ms 560: learn: 554249.3089719 total: 995ms remaining: 779ms 561: learn: 553875.5410281 total: 996ms remaining: 777ms 562: learn: 553079.1401555 total: 998ms remaining: 775ms 563: learn: 553065.7851532 total: 999ms remaining: 773ms 564: learn: 552610.3793400 total: 1s remaining: 771ms 565: learn: 552296.2039784 total: 1s remaining: 769ms 566: learn: 552283.7812421 total: 1s remaining: 767ms 567: learn: 552271.8903471 total: 1s remaining: 765ms 568: learn: 551672.0254108 total: 1.01s remaining: 763ms 569: learn: 551398.4978856 total: 1.01s remaining: 762ms 570: learn: 550924.9156389 total: 1.01s remaining: 760ms 571: learn: 550637.8603142 total: 1.01s remaining: 758ms 572: learn: 550278.5659782 total: 1.01s remaining: 756ms 573: learn: 550079.2612382 total: 1.01s remaining: 754ms 574: learn: 549538.5868300 total: 1.02s remaining: 752ms 575: learn: 549038.4246437 total: 1.02s remaining: 750ms 576: learn: 548270.1149547 total: 1.02s remaining: 748ms 577: learn: 548181.2519813 total: 1.02s remaining: 746ms 578: learn: 547859.2357346 total: 1.02s remaining: 745ms 579: learn: 547419.1072634 total: 1.02s remaining: 743ms 580: learn: 546344.6532026 total: 1.03s remaining: 741ms 581: learn: 546036.5638638 total: 1.03s remaining: 739ms 582: learn: 545410.2112754 total: 1.03s remaining: 737ms 583: learn: 545054.2491350 total: 1.03s remaining: 735ms 584: learn: 545040.2632077 total: 1.03s remaining: 733ms 585: learn: 545017.2921955 total: 1.03s remaining: 731ms 586: learn: 544497.1090562 total: 1.04s remaining: 729ms 587: learn: 543735.9230739 total: 1.04s remaining: 728ms 588: learn: 543065.2969017 total: 1.04s remaining: 726ms 589: learn: 542446.4348978 total: 1.04s remaining: 724ms 590: learn: 541977.6788713 total: 1.04s remaining: 722ms 591: learn: 541673.7362804 total: 1.04s remaining: 721ms 592: learn: 541293.6565717 total: 1.05s remaining: 719ms 593: learn: 540625.6785507 total: 1.05s remaining: 717ms 594: learn: 540395.1357678 total: 1.05s remaining: 715ms 595: learn: 540082.9042351 total: 1.05s remaining: 714ms 596: learn: 540060.6318871 total: 1.05s remaining: 712ms 597: learn: 539713.5759660 total: 1.05s remaining: 710ms 598: learn: 539647.7011965 total: 1.06s remaining: 708ms 599: learn: 539010.1447113 total: 1.06s remaining: 706ms 600: learn: 538626.2223052 total: 1.06s remaining: 704ms 601: learn: 538585.7142113 total: 1.06s remaining: 702ms 602: learn: 538229.2837948 total: 1.06s remaining: 700ms 603: learn: 537851.3267439 total: 1.06s remaining: 699ms 604: learn: 537708.9160302 total: 1.07s remaining: 697ms 605: learn: 536903.5519968 total: 1.07s remaining: 695ms 606: learn: 536625.8510296 total: 1.07s remaining: 693ms 607: learn: 536204.1000152 total: 1.07s remaining: 691ms 608: learn: 535944.5104393 total: 1.07s remaining: 689ms 609: learn: 534945.2605092 total: 1.07s remaining: 688ms 610: learn: 534369.9129093 total: 1.08s remaining: 686ms 611: learn: 534197.3584778 total: 1.08s remaining: 684ms 612: learn: 533926.2424710 total: 1.08s remaining: 682ms 613: learn: 533787.9378372 total: 1.08s remaining: 681ms 614: learn: 533097.2539169 total: 1.08s remaining: 679ms 615: learn: 532865.8517871 total: 1.08s remaining: 677ms 616: learn: 532609.1912472 total: 1.09s remaining: 675ms 617: learn: 532512.8722517 total: 1.09s remaining: 673ms 618: learn: 531903.2203019 total: 1.09s remaining: 672ms 619: learn: 531430.8195756 total: 1.09s remaining: 670ms 620: learn: 531076.6970250 total: 1.09s remaining: 668ms 621: learn: 530128.0157654 total: 1.09s remaining: 666ms 622: learn: 529818.0450129 total: 1.1s remaining: 664ms 623: learn: 529410.4981559 total: 1.1s remaining: 663ms 624: learn: 528896.9920794 total: 1.1s remaining: 661ms 625: learn: 528333.7154071 total: 1.1s remaining: 659ms 626: learn: 528078.1018174 total: 1.1s remaining: 657ms 627: learn: 527557.4217566 total: 1.11s remaining: 655ms 628: learn: 527245.3764432 total: 1.11s remaining: 653ms 629: learn: 526897.7198693 total: 1.11s remaining: 651ms 630: learn: 526467.5231238 total: 1.11s remaining: 650ms 631: learn: 526205.3649199 total: 1.11s remaining: 648ms 632: learn: 525438.4591311 total: 1.11s remaining: 646ms 633: learn: 525162.6435472 total: 1.12s remaining: 644ms 634: learn: 524704.6277897 total: 1.12s remaining: 642ms 635: learn: 524393.4274488 total: 1.12s remaining: 641ms 636: learn: 524161.7686953 total: 1.12s remaining: 639ms 637: learn: 523374.3524131 total: 1.12s remaining: 638ms 638: learn: 523184.3122856 total: 1.13s remaining: 636ms 639: learn: 522685.8117707 total: 1.13s remaining: 634ms 640: learn: 522437.8146248 total: 1.13s remaining: 632ms 641: learn: 522213.7583054 total: 1.13s remaining: 630ms 642: learn: 521666.9028696 total: 1.13s remaining: 629ms 643: learn: 521491.3470049 total: 1.13s remaining: 627ms 644: learn: 521215.8150371 total: 1.14s remaining: 625ms 645: learn: 520936.8784805 total: 1.14s remaining: 623ms 646: learn: 520430.4868972 total: 1.14s remaining: 622ms 647: learn: 520139.0240205 total: 1.14s remaining: 620ms 648: learn: 519775.8762090 total: 1.14s remaining: 618ms 649: learn: 519348.8114774 total: 1.15s remaining: 617ms 650: learn: 519050.3693759 total: 1.15s remaining: 615ms 651: learn: 518737.8282534 total: 1.15s remaining: 613ms 652: learn: 518521.0134997 total: 1.15s remaining: 612ms 653: learn: 518150.6328760 total: 1.15s remaining: 610ms 654: learn: 517830.7939700 total: 1.15s remaining: 608ms 655: learn: 517217.6358184 total: 1.16s remaining: 606ms 656: learn: 516979.0634629 total: 1.16s remaining: 604ms 657: learn: 516601.6116958 total: 1.16s remaining: 602ms 658: learn: 516201.9531346 total: 1.16s remaining: 601ms 659: learn: 515907.3323901 total: 1.16s remaining: 599ms 660: learn: 515231.1408901 total: 1.16s remaining: 597ms 661: learn: 514398.2670063 total: 1.17s remaining: 596ms 662: learn: 513959.8742003 total: 1.17s remaining: 594ms 663: learn: 513486.7954754 total: 1.17s remaining: 592ms 664: learn: 513086.8277120 total: 1.17s remaining: 590ms 665: learn: 512734.8046206 total: 1.17s remaining: 588ms 666: learn: 511822.9546810 total: 1.17s remaining: 586ms 667: learn: 511063.3347723 total: 1.18s remaining: 585ms 668: learn: 510515.7083968 total: 1.18s remaining: 583ms 669: learn: 510274.2638360 total: 1.18s remaining: 581ms 670: learn: 509943.9848783 total: 1.18s remaining: 579ms 671: learn: 509502.3602302 total: 1.18s remaining: 577ms 672: learn: 509290.1902093 total: 1.18s remaining: 576ms 673: learn: 508925.6607418 total: 1.19s remaining: 574ms 674: learn: 508398.6910766 total: 1.19s remaining: 572ms 675: learn: 507790.3368635 total: 1.19s remaining: 570ms 676: learn: 507343.2867001 total: 1.19s remaining: 568ms 677: learn: 506919.2639245 total: 1.19s remaining: 567ms 678: learn: 506374.9498975 total: 1.19s remaining: 565ms 679: learn: 505711.2717246 total: 1.2s remaining: 563ms 680: learn: 505268.7343548 total: 1.2s remaining: 561ms 681: learn: 505074.5177240 total: 1.2s remaining: 559ms 682: learn: 504693.9086920 total: 1.2s remaining: 558ms 683: learn: 504311.0627348 total: 1.2s remaining: 556ms 684: learn: 504042.4761343 total: 1.2s remaining: 554ms 685: learn: 503652.6419758 total: 1.21s remaining: 552ms 686: learn: 503384.0500127 total: 1.21s remaining: 550ms 687: learn: 502958.8479855 total: 1.21s remaining: 548ms 688: learn: 502449.3368187 total: 1.21s remaining: 546ms 689: learn: 502158.2674475 total: 1.21s remaining: 544ms 690: learn: 501561.6059362 total: 1.21s remaining: 543ms 691: learn: 501256.5896678 total: 1.22s remaining: 541ms 692: learn: 500744.9181733 total: 1.22s remaining: 539ms 693: learn: 500415.1390807 total: 1.22s remaining: 537ms 694: learn: 500083.9087945 total: 1.22s remaining: 535ms 695: learn: 499908.4377188 total: 1.22s remaining: 534ms 696: learn: 499767.9657845 total: 1.22s remaining: 532ms 697: learn: 499327.9574174 total: 1.22s remaining: 530ms 698: learn: 498466.7745424 total: 1.23s remaining: 528ms 699: learn: 498457.8700823 total: 1.23s remaining: 526ms 700: learn: 497732.3005811 total: 1.23s remaining: 524ms 701: learn: 497217.1350449 total: 1.23s remaining: 522ms 702: learn: 497081.2985617 total: 1.23s remaining: 521ms 703: learn: 496859.6522786 total: 1.23s remaining: 519ms 704: learn: 496619.0506990 total: 1.24s remaining: 517ms 705: learn: 496248.7610671 total: 1.24s remaining: 515ms 706: learn: 496010.7129782 total: 1.24s remaining: 513ms 707: learn: 495754.4123097 total: 1.24s remaining: 511ms 708: learn: 495164.4352701 total: 1.24s remaining: 510ms 709: learn: 494596.6775040 total: 1.24s remaining: 508ms 710: learn: 494278.7462026 total: 1.24s remaining: 506ms 711: learn: 494270.0680347 total: 1.25s remaining: 504ms 712: learn: 493768.3445891 total: 1.25s remaining: 502ms 713: learn: 493246.3611003 total: 1.25s remaining: 500ms 714: learn: 493143.0271902 total: 1.25s remaining: 498ms 715: learn: 492832.5168693 total: 1.25s remaining: 496ms 716: learn: 492737.8955015 total: 1.25s remaining: 495ms 717: learn: 492399.9106832 total: 1.25s remaining: 493ms 718: learn: 492053.1012353 total: 1.25s remaining: 491ms 719: learn: 492046.9586246 total: 1.26s remaining: 489ms 720: learn: 491355.5756472 total: 1.26s remaining: 487ms 721: learn: 491280.4199426 total: 1.26s remaining: 485ms 722: learn: 491014.9700963 total: 1.26s remaining: 483ms 723: learn: 490636.8889391 total: 1.26s remaining: 482ms 724: learn: 490589.1705362 total: 1.26s remaining: 480ms 725: learn: 490237.3695018 total: 1.27s remaining: 478ms 726: learn: 489989.3226268 total: 1.27s remaining: 476ms 727: learn: 489645.8230440 total: 1.27s remaining: 474ms 728: learn: 489304.2715046 total: 1.27s remaining: 472ms 729: learn: 489000.2238592 total: 1.27s remaining: 470ms 730: learn: 488910.6775499 total: 1.27s remaining: 469ms 731: learn: 488644.1437815 total: 1.27s remaining: 467ms 732: learn: 488245.7636253 total: 1.28s remaining: 465ms 733: learn: 488074.2194389 total: 1.28s remaining: 463ms 734: learn: 488031.0407224 total: 1.28s remaining: 461ms 735: learn: 487715.7215346 total: 1.28s remaining: 459ms 736: learn: 487241.6917250 total: 1.28s remaining: 458ms 737: learn: 486803.9363281 total: 1.28s remaining: 456ms 738: learn: 486642.5029676 total: 1.28s remaining: 454ms 739: learn: 486008.4982344 total: 1.29s remaining: 452ms 740: learn: 485814.2733818 total: 1.29s remaining: 450ms 741: learn: 485301.0146848 total: 1.29s remaining: 449ms 742: learn: 485018.9994106 total: 1.29s remaining: 447ms 743: learn: 484971.9447785 total: 1.29s remaining: 445ms 744: learn: 484635.9058343 total: 1.29s remaining: 443ms 745: learn: 484114.9601518 total: 1.3s remaining: 441ms 746: learn: 483831.0185022 total: 1.3s remaining: 439ms 747: learn: 483558.1067741 total: 1.3s remaining: 438ms 748: learn: 483257.1876600 total: 1.3s remaining: 436ms 749: learn: 482826.6283920 total: 1.3s remaining: 434ms 750: learn: 482092.2725978 total: 1.3s remaining: 432ms 751: learn: 481882.3556205 total: 1.3s remaining: 430ms 752: learn: 481384.3937575 total: 1.31s remaining: 429ms 753: learn: 481166.0804171 total: 1.31s remaining: 427ms 754: learn: 480449.4294155 total: 1.31s remaining: 425ms 755: learn: 480141.6425368 total: 1.31s remaining: 424ms 756: learn: 479965.6847843 total: 1.31s remaining: 422ms 757: learn: 479634.8979677 total: 1.31s remaining: 420ms 758: learn: 479375.3600178 total: 1.32s remaining: 418ms 759: learn: 479170.8339164 total: 1.32s remaining: 417ms 760: learn: 478966.6151483 total: 1.32s remaining: 415ms 761: learn: 478702.4941290 total: 1.32s remaining: 413ms 762: learn: 478279.7308061 total: 1.32s remaining: 411ms 763: learn: 478212.9833995 total: 1.32s remaining: 409ms 764: learn: 478000.1920954 total: 1.33s remaining: 408ms 765: learn: 477793.2338557 total: 1.33s remaining: 406ms 766: learn: 477530.7562605 total: 1.33s remaining: 404ms 767: learn: 477266.5605995 total: 1.33s remaining: 402ms 768: learn: 477043.6234637 total: 1.33s remaining: 401ms 769: learn: 476756.2986925 total: 1.33s remaining: 399ms 770: learn: 476449.3158332 total: 1.34s remaining: 397ms 771: learn: 476105.9373490 total: 1.34s remaining: 395ms 772: learn: 475832.6206196 total: 1.34s remaining: 394ms 773: learn: 475765.2412465 total: 1.34s remaining: 392ms 774: learn: 475562.9614587 total: 1.34s remaining: 390ms 775: learn: 475498.2465659 total: 1.34s remaining: 388ms 776: learn: 474924.3922888 total: 1.35s remaining: 386ms 777: learn: 474610.1409890 total: 1.35s remaining: 385ms 778: learn: 474471.3208506 total: 1.35s remaining: 383ms 779: learn: 474259.5786057 total: 1.35s remaining: 381ms 780: learn: 474098.7188134 total: 1.35s remaining: 379ms 781: learn: 473819.9108683 total: 1.35s remaining: 378ms 782: learn: 473749.5350543 total: 1.35s remaining: 376ms 783: learn: 473513.9762347 total: 1.36s remaining: 374ms 784: learn: 473428.5456181 total: 1.36s remaining: 372ms 785: learn: 473339.8175497 total: 1.36s remaining: 370ms 786: learn: 472960.1066619 total: 1.36s remaining: 369ms 787: learn: 472645.9834107 total: 1.36s remaining: 367ms 788: learn: 472236.0627795 total: 1.36s remaining: 365ms 789: learn: 471765.5420542 total: 1.37s remaining: 363ms 790: learn: 471396.6843690 total: 1.37s remaining: 362ms 791: learn: 471088.8677505 total: 1.37s remaining: 360ms 792: learn: 470683.5776264 total: 1.37s remaining: 358ms 793: learn: 470496.4214966 total: 1.37s remaining: 356ms 794: learn: 469954.3760656 total: 1.37s remaining: 354ms 795: learn: 469637.4334606 total: 1.38s remaining: 353ms 796: learn: 469121.9241177 total: 1.38s remaining: 351ms 797: learn: 468853.8509277 total: 1.38s remaining: 349ms 798: learn: 468541.5205427 total: 1.38s remaining: 348ms 799: learn: 468169.2473621 total: 1.38s remaining: 346ms 800: learn: 467968.5588308 total: 1.38s remaining: 344ms 801: learn: 467819.2378569 total: 1.39s remaining: 342ms 802: learn: 467421.0182578 total: 1.39s remaining: 340ms 803: learn: 467117.3676951 total: 1.39s remaining: 339ms 804: learn: 466976.8888907 total: 1.39s remaining: 337ms 805: learn: 466710.9100166 total: 1.39s remaining: 335ms 806: learn: 466472.9138093 total: 1.39s remaining: 333ms 807: learn: 466417.6664324 total: 1.4s remaining: 332ms 808: learn: 466278.9979795 total: 1.4s remaining: 330ms 809: learn: 466153.4731022 total: 1.4s remaining: 328ms 810: learn: 465604.2184871 total: 1.4s remaining: 327ms 811: learn: 465258.3267143 total: 1.4s remaining: 325ms 812: learn: 465034.6496414 total: 1.4s remaining: 323ms 813: learn: 464616.8698843 total: 1.41s remaining: 321ms 814: learn: 464316.1895999 total: 1.41s remaining: 320ms 815: learn: 464053.6662763 total: 1.41s remaining: 318ms 816: learn: 463976.9109130 total: 1.41s remaining: 316ms 817: learn: 463625.8819300 total: 1.41s remaining: 314ms 818: learn: 463241.2062390 total: 1.41s remaining: 313ms 819: learn: 463002.2416837 total: 1.42s remaining: 311ms 820: learn: 462588.3189403 total: 1.42s remaining: 309ms 821: learn: 462137.7890122 total: 1.42s remaining: 308ms 822: learn: 461667.9371196 total: 1.42s remaining: 306ms 823: learn: 461318.4716541 total: 1.42s remaining: 304ms 824: learn: 461114.9398091 total: 1.43s remaining: 302ms 825: learn: 460949.5001780 total: 1.43s remaining: 301ms 826: learn: 460542.1337752 total: 1.43s remaining: 299ms 827: learn: 460182.6678645 total: 1.43s remaining: 297ms 828: learn: 459910.7994632 total: 1.43s remaining: 296ms 829: learn: 459656.3126739 total: 1.43s remaining: 294ms 830: learn: 459299.6351659 total: 1.44s remaining: 292ms 831: learn: 459080.9737638 total: 1.44s remaining: 290ms 832: learn: 458932.8930712 total: 1.44s remaining: 289ms 833: learn: 458476.3799752 total: 1.44s remaining: 287ms 834: learn: 458175.0929910 total: 1.44s remaining: 285ms 835: learn: 457932.9252636 total: 1.44s remaining: 283ms 836: learn: 457670.8261835 total: 1.45s remaining: 282ms 837: learn: 457551.4259692 total: 1.45s remaining: 280ms 838: learn: 457122.0374862 total: 1.45s remaining: 278ms 839: learn: 456960.3722620 total: 1.45s remaining: 276ms 840: learn: 456743.1692987 total: 1.45s remaining: 275ms 841: learn: 456523.3476834 total: 1.45s remaining: 273ms 842: learn: 456243.6746123 total: 1.46s remaining: 271ms 843: learn: 455985.8045905 total: 1.46s remaining: 269ms 844: learn: 455852.8517650 total: 1.46s remaining: 268ms 845: learn: 455592.5742629 total: 1.46s remaining: 266ms 846: learn: 455384.0765482 total: 1.46s remaining: 264ms 847: learn: 454709.1179268 total: 1.47s remaining: 263ms 848: learn: 454598.5954008 total: 1.47s remaining: 261ms 849: learn: 454196.5678067 total: 1.47s remaining: 259ms 850: learn: 454079.7366132 total: 1.47s remaining: 258ms 851: learn: 453831.3675817 total: 1.47s remaining: 256ms 852: learn: 453571.7761230 total: 1.48s remaining: 254ms 853: learn: 453163.6416587 total: 1.48s remaining: 253ms 854: learn: 452977.9862674 total: 1.48s remaining: 251ms 855: learn: 452311.7988360 total: 1.48s remaining: 249ms 856: learn: 451896.5974678 total: 1.48s remaining: 248ms 857: learn: 451753.3448546 total: 1.49s remaining: 246ms 858: learn: 451490.1434259 total: 1.49s remaining: 244ms 859: learn: 450888.8595454 total: 1.49s remaining: 242ms 860: learn: 450701.3166750 total: 1.49s remaining: 241ms 861: learn: 450355.3486723 total: 1.49s remaining: 239ms 862: learn: 450195.8067969 total: 1.5s remaining: 237ms 863: learn: 449644.7781371 total: 1.5s remaining: 236ms 864: learn: 449265.8471640 total: 1.5s remaining: 234ms 865: learn: 449138.6088422 total: 1.5s remaining: 233ms 866: learn: 448897.2828697 total: 1.5s remaining: 231ms 867: learn: 448253.5302338 total: 1.51s remaining: 229ms 868: learn: 447833.0442902 total: 1.51s remaining: 228ms 869: learn: 447491.1420635 total: 1.51s remaining: 226ms 870: learn: 447134.1920003 total: 1.51s remaining: 224ms 871: learn: 446838.1400040 total: 1.52s remaining: 223ms 872: learn: 446650.9687321 total: 1.52s remaining: 221ms 873: learn: 446407.7123459 total: 1.52s remaining: 219ms 874: learn: 446252.6698551 total: 1.52s remaining: 218ms 875: learn: 446024.6656652 total: 1.52s remaining: 216ms 876: learn: 445933.7618302 total: 1.53s remaining: 214ms 877: learn: 445664.0064681 total: 1.53s remaining: 212ms 878: learn: 445476.6330796 total: 1.53s remaining: 211ms 879: learn: 445172.7699897 total: 1.53s remaining: 209ms 880: learn: 444985.7072165 total: 1.53s remaining: 207ms 881: learn: 444512.9007538 total: 1.54s remaining: 206ms 882: learn: 444121.3864509 total: 1.54s remaining: 204ms 883: learn: 443984.9876670 total: 1.54s remaining: 202ms 884: learn: 443848.2248005 total: 1.54s remaining: 200ms 885: learn: 443338.4339421 total: 1.54s remaining: 199ms 886: learn: 443150.6727184 total: 1.55s remaining: 197ms 887: learn: 442853.1760967 total: 1.55s remaining: 196ms 888: learn: 442649.0067228 total: 1.55s remaining: 194ms 889: learn: 442294.0687810 total: 1.55s remaining: 192ms 890: learn: 442201.6036129 total: 1.56s remaining: 190ms 891: learn: 442018.3220485 total: 1.56s remaining: 189ms 892: learn: 441452.4789688 total: 1.56s remaining: 187ms 893: learn: 441262.8575591 total: 1.56s remaining: 185ms 894: learn: 440965.0542903 total: 1.56s remaining: 184ms 895: learn: 440732.9915299 total: 1.57s remaining: 182ms 896: learn: 440395.7726270 total: 1.57s remaining: 180ms 897: learn: 440145.2499781 total: 1.57s remaining: 178ms 898: learn: 440029.5265502 total: 1.57s remaining: 177ms 899: learn: 439742.5193791 total: 1.57s remaining: 175ms 900: learn: 439499.9000665 total: 1.57s remaining: 173ms 901: learn: 439087.7265911 total: 1.58s remaining: 172ms 902: learn: 438966.8689017 total: 1.58s remaining: 170ms 903: learn: 438630.6230593 total: 1.58s remaining: 168ms 904: learn: 438489.6107591 total: 1.58s remaining: 166ms 905: learn: 438276.1912702 total: 1.59s remaining: 165ms 906: learn: 438138.5083262 total: 1.59s remaining: 163ms 907: learn: 437953.9278200 total: 1.59s remaining: 161ms 908: learn: 437530.4931529 total: 1.59s remaining: 159ms 909: learn: 437149.3479712 total: 1.59s remaining: 158ms 910: learn: 436840.4155829 total: 1.59s remaining: 156ms 911: learn: 436573.9108703 total: 1.6s remaining: 154ms 912: learn: 436248.6883443 total: 1.6s remaining: 152ms 913: learn: 436099.9385418 total: 1.6s remaining: 151ms 914: learn: 435986.8993413 total: 1.6s remaining: 149ms 915: learn: 435725.5262522 total: 1.61s remaining: 147ms 916: learn: 435556.7593045 total: 1.61s remaining: 146ms 917: learn: 435346.8384541 total: 1.61s remaining: 144ms 918: learn: 434988.0436501 total: 1.61s remaining: 142ms 919: learn: 434624.5469595 total: 1.61s remaining: 140ms 920: learn: 434085.0648729 total: 1.62s remaining: 139ms 921: learn: 433818.3536109 total: 1.62s remaining: 137ms 922: learn: 433672.2487993 total: 1.62s remaining: 135ms 923: learn: 433343.6780149 total: 1.62s remaining: 133ms 924: learn: 433010.9370766 total: 1.62s remaining: 132ms 925: learn: 432814.2458121 total: 1.63s remaining: 130ms 926: learn: 432443.4186648 total: 1.63s remaining: 128ms 927: learn: 432264.5284919 total: 1.63s remaining: 126ms 928: learn: 432116.8948556 total: 1.63s remaining: 125ms 929: learn: 432035.1579036 total: 1.63s remaining: 123ms 930: learn: 431686.9937490 total: 1.63s remaining: 121ms 931: learn: 431528.9562750 total: 1.64s remaining: 119ms 932: learn: 431099.9043614 total: 1.64s remaining: 118ms 933: learn: 430984.7014373 total: 1.64s remaining: 116ms 934: learn: 430901.4598615 total: 1.64s remaining: 114ms 935: learn: 430438.2590809 total: 1.64s remaining: 112ms 936: learn: 430099.3954092 total: 1.65s remaining: 111ms 937: learn: 429695.3873359 total: 1.65s remaining: 109ms 938: learn: 429418.1287791 total: 1.65s remaining: 107ms 939: learn: 429195.4337640 total: 1.65s remaining: 105ms 940: learn: 428706.0419968 total: 1.65s remaining: 104ms 941: learn: 428371.0145034 total: 1.66s remaining: 102ms 942: learn: 428098.1132403 total: 1.66s remaining: 100ms 943: learn: 428028.1573217 total: 1.66s remaining: 98.4ms 944: learn: 427767.4589936 total: 1.66s remaining: 96.7ms 945: learn: 427373.5548330 total: 1.66s remaining: 94.9ms 946: learn: 427325.2453061 total: 1.66s remaining: 93.1ms 947: learn: 426965.8483436 total: 1.67s remaining: 91.4ms 948: learn: 426834.6951193 total: 1.67s remaining: 89.6ms 949: learn: 426775.8797008 total: 1.67s remaining: 87.9ms 950: learn: 426592.0269151 total: 1.67s remaining: 86.1ms 951: learn: 426385.9026490 total: 1.67s remaining: 84.3ms 952: learn: 426246.1434272 total: 1.67s remaining: 82.6ms 953: learn: 425943.3090975 total: 1.68s remaining: 80.8ms 954: learn: 425804.1602212 total: 1.68s remaining: 79.1ms 955: learn: 425512.6731857 total: 1.68s remaining: 77.3ms 956: learn: 425352.2526325 total: 1.68s remaining: 75.5ms 957: learn: 425008.9809340 total: 1.68s remaining: 73.8ms 958: learn: 424739.1125656 total: 1.69s remaining: 72.1ms 959: learn: 424613.2072394 total: 1.69s remaining: 70.3ms 960: learn: 424200.9965139 total: 1.69s remaining: 68.6ms 961: learn: 424069.3485074 total: 1.69s remaining: 66.8ms 962: learn: 423845.4433255 total: 1.69s remaining: 65ms 963: learn: 423489.8301079 total: 1.69s remaining: 63.3ms 964: learn: 423231.6452796 total: 1.7s remaining: 61.5ms 965: learn: 423063.3658705 total: 1.7s remaining: 59.8ms 966: learn: 422927.0846705 total: 1.7s remaining: 58ms 967: learn: 422349.7398200 total: 1.7s remaining: 56.2ms 968: learn: 422251.3951729 total: 1.7s remaining: 54.5ms 969: learn: 422186.5967649 total: 1.7s remaining: 52.7ms 970: learn: 421989.5068491 total: 1.71s remaining: 51ms 971: learn: 421620.6848419 total: 1.71s remaining: 49.2ms 972: learn: 421500.5249182 total: 1.71s remaining: 47.5ms 973: learn: 421281.0273494 total: 1.71s remaining: 45.7ms 974: learn: 421157.0538342 total: 1.71s remaining: 44ms 975: learn: 421019.4970763 total: 1.72s remaining: 42.2ms 976: learn: 420713.4262552 total: 1.72s remaining: 40.4ms 977: learn: 420202.5589500 total: 1.72s remaining: 38.7ms 978: learn: 419942.9219338 total: 1.72s remaining: 36.9ms 979: learn: 419788.1796433 total: 1.72s remaining: 35.2ms 980: learn: 419630.5791648 total: 1.72s remaining: 33.4ms 981: learn: 419334.5019761 total: 1.73s remaining: 31.7ms 982: learn: 419174.2919824 total: 1.73s remaining: 29.9ms 983: learn: 418962.8443200 total: 1.73s remaining: 28.1ms 984: learn: 418783.0553754 total: 1.73s remaining: 26.4ms 985: learn: 418511.7834749 total: 1.73s remaining: 24.6ms 986: learn: 418384.7205589 total: 1.74s remaining: 22.9ms 987: learn: 418210.2548020 total: 1.74s remaining: 21.1ms 988: learn: 417998.8453985 total: 1.74s remaining: 19.3ms 989: learn: 417791.0122740 total: 1.74s remaining: 17.6ms 990: learn: 417574.0833751 total: 1.74s remaining: 15.8ms 991: learn: 417344.7112703 total: 1.74s remaining: 14.1ms 992: learn: 417027.7431051 total: 1.75s remaining: 12.3ms 993: learn: 416645.2205431 total: 1.75s remaining: 10.6ms 994: learn: 416459.9290075 total: 1.75s remaining: 8.79ms 995: learn: 416259.2156446 total: 1.75s remaining: 7.03ms 996: learn: 416047.9788972 total: 1.75s remaining: 5.28ms 997: learn: 415902.8651412 total: 1.75s remaining: 3.52ms 998: learn: 415599.4277603 total: 1.76s remaining: 1.76ms 999: learn: 415464.8669968 total: 1.76s remaining: 0us
from sklearn.metrics import r2_score,mean_squared_error
print('R-squared on train data :',r2_score(y_train, y_pred_train))
print('R-squared on test data :',r2_score(y_test, y_pred_test))
print('\nRMSE on train data :', np.sqrt(mean_squared_error(y_train, y_pred_train)))
print('RMSE on test data :', np.sqrt(mean_squared_error(y_test, y_pred_test)))
R-squared on train data : 0.9985854521040535 R-squared on test data : 0.9860854898443139 RMSE on train data : 415464.86699676275 RMSE on test data : 1331004.4139728015
mse=np.mean((y_pred_test-y_test)**2)
rmse=np.sqrt(mse)
print("root_mean_squared_error :", rmse)
mae=(abs(y_pred_test-y_test)).mean()
print("mean_absolute_error :", mae)
mape=(abs(y_test-y_pred_test)/y_pred_test).mean()
print("mean_absolute_percentage_error :",mape)
root_mean_squared_error : 1331004.413972802 mean_absolute_error : 415131.58686873293 mean_absolute_percentage_error : 0.13107357468387298
ypred=pd.DataFrame(y_pred_test,columns=["predicted"])
ytest=pd.DataFrame(y_test)
ytest=ytest.reset_index(drop=True)
new=pd.concat([ytest,ypred],axis=1)
new["actual_predicted"]=new["predicted"].apply(lambda x: "%.7f" % x)
new["actual_predicted"]=new["actual_predicted"].astype(float)
new=new.drop(["predicted"],axis=1)
new["actual_predicted"]=new["actual_predicted"].apply(np.ceil)
new
| Release Clause | actual_predicted | |
|---|---|---|
| 0 | 8500000.0 | 9419283.0 |
| 1 | 11500000.0 | 10069783.0 |
| 2 | 2300000.0 | 2488346.0 |
| 3 | 1400000.0 | 1751202.0 |
| 4 | 998000.0 | 969406.0 |
| ... | ... | ... |
| 4988 | 10200000.0 | 10695750.0 |
| 4989 | 522000.0 | 557240.0 |
| 4990 | 165000.0 | 177182.0 |
| 4991 | 979000.0 | 956979.0 |
| 4992 | 20800000.0 | 19630659.0 |
4993 rows × 2 columns
REGULARISATION
model_cols=['Age', 'Overall', 'Potential', 'Value', 'Wage', 'International Reputation', 'Crossing', 'Finishing', 'ShortPassing', 'Volleys', 'LongPassing', 'Stamina', 'Vision', 'Marking', 'StandingTackle', 'GKHandling', 'Body_Type_C. Ronaldo', 'Body_Type_Courtois', 'Body_Type_Messi', 'Real_Face_Yes', 'Position_CDM', 'Position_GK', 'Position_LF', 'Position_LS', 'Position_RAM', 'Position_RF', 'weight_group_over']
multi_x=np.column_stack(tuple(df[col] for col in model_cols))
y=df["Release Clause"]
multi_train_x,multi_test_x,multi_train_y,multi_test_y=train_test_split(multi_x,y,test_size=0.3)
from sklearn.metrics import mean_absolute_error,mean_squared_error,r2_score
from sklearn import linear_model
import sklearn.metrics
multi_model=linear_model.LinearRegression()
multi_model.fit(multi_train_x,multi_train_y)
multi_model_intercept=multi_model.intercept_
multi_coefficient=dict(zip(model_cols,multi_model.coef_))
multi_model_intercept
733722.3940899568
multi_coefficient
{'Age': 14976.172397755767,
'Overall': -42583.92784264459,
'Potential': 34283.23541728564,
'Value': 1.9748114573012572,
'Wage': 1.85797218691539,
'International Reputation': -770226.5824832785,
'Crossing': 3739.8020137839135,
'Finishing': 7786.308980215966,
'ShortPassing': 3359.129594245497,
'Volleys': -8560.178402658714,
'LongPassing': -4005.48036774599,
'Stamina': -7168.159601544103,
'Vision': -1499.9872599295124,
'Marking': -2966.9241259027817,
'StandingTackle': 5882.758555169945,
'GKHandling': 7474.662060547948,
'Body_Type_C. Ronaldo': -22384854.269877747,
'Body_Type_Courtois': 9961899.266395718,
'Body_Type_Messi': 13125064.388448808,
'Real_Face_Yes': 382650.9165839134,
'Position_CDM': 141454.9857702136,
'Position_GK': -368048.98571322876,
'Position_LF': -2631879.124687559,
'Position_LS': -499873.1838974353,
'Position_RAM': -1783346.3295032077,
'Position_RF': -2604157.3430842175,
'weight_group_over': -130204.43485616088}
def predict_metrics(lr,x,y):
pred=lr.predict(x)
mae=mean_absolute_error(y,pred)
mse=mean_squared_error(y,pred)
r2=r2_score(y,pred)
return mae,mse,r2
train_mae,train_mse,train_r2=predict_metrics(multi_model,multi_train_x,multi_train_y)
test_mae,test_mse,test_r2=predict_metrics(multi_model,multi_test_x,multi_test_y)
print(train_mae,train_mse,train_r2)
print(test_mae,test_mse,test_r2)
496907.47076216957 1242580881278.0315 0.9899351407614435 508168.5613845674 1280374552792.4653 0.9896739279163911
#RIDGE
from sklearn.linear_model import Ridge
r=Ridge(alpha=0.05,normalize=True)
r.fit(multi_train_x,multi_train_y)
train_mae,train_mse,train_r2=predict_metrics(r,multi_train_x,multi_train_y)
test_mae,test_mse,test_r2=predict_metrics(r,multi_test_x,multi_test_y)
print(train_mae,train_mse,train_r2)
print(test_mae,test_mse,test_r2)
#RMSE
print(np.sqrt(train_mse))
print(np.sqrt(test_mse))
697279.9742344046 2113903278659.7104 0.9828774615285003 689906.2573848163 1935353730947.1836 0.9843915969046276 1453926.8477676965 1391169.9144774457
#LASSO
from sklearn.linear_model import Lasso
lm=Lasso(alpha=0.05,normalize=True)
lm.fit(multi_train_x,multi_train_y)
train_mae,train_mse,train_r2=predict_metrics(lm,multi_train_x,multi_train_y)
test_mae,test_mse,test_r2=predict_metrics(lm,multi_test_x,multi_test_y)
print(train_mae,train_mse,train_r2)
print(test_mae,test_mse,test_r2)
#RMSE
print(np.sqrt(train_mse))
print(np.sqrt(test_mse))
496892.8851416481 1242580887898.9478 0.9899351407078143 508154.7968439397 1280368656243.724 0.989673975471372 1114711.1230713308 1131533.7627502433
#ELASTIC NET
from sklearn.linear_model import ElasticNet
e=ElasticNet(alpha=0.05)
e.fit(multi_train_x,multi_train_y)
train_mae,train_mse,train_r2=predict_metrics(e,multi_train_x,multi_train_y)
test_mae,test_mse,test_r2=predict_metrics(e,multi_test_x,multi_test_y)
print(train_mae,train_mse,train_r2)
print(test_mae,test_mse,test_r2)
#RMSE
print(np.sqrt(train_mse))
print(np.sqrt(test_mse))
496948.4984039156 1323348691796.6042 0.9892809244797317 500477.6516655898 1268535111613.285 0.9897694116346353 1150368.9372530032 1126292.6403085855
CLASSIFICATION
df=data[["Release Clause",'Age', 'Overall', 'Potential', 'Value', 'Wage', 'International Reputation', 'Crossing', 'Finishing', 'ShortPassing', 'Volleys', 'LongPassing', 'Stamina', 'Vision', 'Marking', 'StandingTackle', 'GKHandling', 'Body_Type_C. Ronaldo', 'Body_Type_Courtois', 'Body_Type_Messi', 'Real_Face_Yes', 'Position_CDM', 'Position_GK', 'Position_LF', 'Position_LS', 'Position_RAM', 'Position_RF', 'weight_group_over','Release_Clause_Class']]
df['Release_Clause_Class'].value_counts()
1.0 9150 0.0 7493 Name: Release_Clause_Class, dtype: int64
X=df.drop('Release_Clause_Class',axis=1)
y=df['Release_Clause_Class']
from sklearn.model_selection import train_test_split
from scipy.stats import chisquare
X_train,X_test,y_train,y_test=train_test_split(X,y,test_size=0.3,random_state=42)
chisquare([len(y),len(y_train),len(y_test)])
Power_divergenceResult(statistic=6157.790001802559, pvalue=0.0)
from sklearn.preprocessing import StandardScaler
sc=StandardScaler()
X_train=sc.fit_transform(X_train)
X_test=sc.fit_transform(X_test)
from sklearn.metrics import confusion_matrix,roc_auc_score,roc_curve,classification_report,accuracy_score
#Method 1
from sklearn.linear_model import LogisticRegression
classifier=LogisticRegression(random_state=42)
classifier.fit(X_train,y_train)
y_pred=classifier.predict(X_test)
from sklearn.metrics import confusion_matrix,roc_auc_score,roc_curve,classification_report,accuracy_score
cm=confusion_matrix(y_test,y_pred)
cm
array([[2157, 57],
[ 164, 2615]])
#Method 2
from sklearn.neighbors import KNeighborsClassifier
classifier=KNeighborsClassifier(n_neighbors=5,metric="minkowski",p=2)
classifier.fit(X_train,y_train)
y_pred=classifier.predict(X_test)
cm=confusion_matrix(y_test,y_pred)
cm
array([[2053, 161],
[ 228, 2551]])
# Method 3
from sklearn.tree import DecisionTreeClassifier
classifier=DecisionTreeClassifier(criterion="entropy",random_state=42)
classifier.fit(X_train,y_train)
y_pred=classifier.predict(X_test)
cm=confusion_matrix(y_test,y_pred)
cm
array([[2214, 0],
[ 94, 2685]])
# Method 4
from sklearn.ensemble import RandomForestClassifier
classi=RandomForestClassifier(n_estimators=50,criterion="entropy",random_state=42)
classi.fit(X_train,y_train)
y_pred=classi.predict(X_test)
cm=confusion_matrix(y_test,y_pred)
cm
array([[2214, 0],
[ 94, 2685]])
y_train_pred=classifier.predict(X_train)
y_train_prob=classifier.predict_proba(X_train)[:,1]
print("accuracy on train : ",accuracy_score(y_train,y_train_pred))
print("auc on train : ",roc_auc_score(y_train,y_train_prob))
y_test_pred=classifier.predict(X_test)
y_test_prob=classifier.predict_proba(X_test)[:,1]
print("accuracy on test : ",accuracy_score(y_test,y_test_pred))
print("auc on test : ",roc_auc_score(y_test,y_test_prob))
accuracy on train : 1.0 auc on train : 1.0 accuracy on test : 0.9811736431003405 auc on test : 0.9830874415257287
print(classification_report(y_test,y_pred))
precision recall f1-score support
0.0 0.96 1.00 0.98 2214
1.0 1.00 0.97 0.98 2779
accuracy 0.98 4993
macro avg 0.98 0.98 0.98 4993
weighted avg 0.98 0.98 0.98 4993
print("confusion matrix : ","\n", confusion_matrix(y_train,y_train_pred))
print("confusion matrix : ","\n", confusion_matrix(y_test,y_test_pred))
confusion matrix : [[5279 0] [ 0 6371]] confusion matrix : [[2214 0] [ 94 2685]]
cf=confusion_matrix(y_test,y_test_pred)
tn=cf[0,0]
fp=cf[0,1]
fn=cf[1,0]
tp=cf[1,1]
t=tn+fp+fn+tp
print(t)
s=(tp+tn)/t
print(s)
error=(fp+fn)/t
print(error)
sensitivity=tp/(fn+tp)
print(sensitivity) # recall score
specificity=tn/(tn+fp)
print(specificity)
precision=tp/(tp+fp)
print(precision)
4993 0.9811736431003405 0.018826356899659523 0.9661748830514574 1.0 1.0
y_prob=classi.predict_proba(X_test)
y_prob[0:10]
array([[0. , 1. ],
[0. , 1. ],
[0. , 1. ],
[0. , 1. ],
[0.94, 0.06],
[0. , 1. ],
[0. , 1. ],
[0.94, 0.06],
[0.96, 0.04],
[0. , 1. ]])
y_prob[:,1]
array([1. , 1. , 1. , ..., 0. , 0.02, 1. ])
fpr,tpr,thresholds=roc_curve(y_test,y_prob[:,1])
plt.plot(fpr,tpr)
plt.xlim([0.0,1.0])
plt.ylim([0.0,1.0])
plt.rcParams["font.size"]=12
plt.title("roc curve")
plt.xlabel("false positive rate")
plt.ylabel("true positive rate")
plt.grid(True)
from sklearn import metrics
print(metrics.accuracy_score(y_test,y_pred))
print(metrics.precision_score(y_test,y_pred))
print(metrics.recall_score(y_test,y_pred))
0.9811736431003405 1.0 0.9661748830514574
y_pred_proba=classi.predict_proba(X_test)[::,1]
fpr,tpr,_=metrics.roc_curve(y_test,y_pred_proba)
auc=metrics.roc_auc_score(y_test,y_pred_proba)
plt.plot(fpr,tpr,label="data 1 , auc ="+str(auc))
plt.legend(loc=4)
plt.show()
fpr , tpr , thresholds = roc_curve(y_train,y_train_prob)
plt.plot(fpr,tpr,color="r")
plt.plot(fpr,fpr,color="g")
plt.xlabel("FPR")
plt.ylabel("TPR")
plt.title("AUC")
plt.show()
thresholds[0]=thresholds[0]-1
fig,ax=plt.subplots(figsize=(10,5))
ax.plot(fpr,tpr,color="r")
ax.plot(fpr,fpr)
ax.set_xlabel("FPR")
ax.set_ylabel("TPR")
ax1=ax.twinx()
ax1.plot(fpr,thresholds)
ax1.set_ylabel("THRESHOLDS")
plt.show()
y_pred_class=y_prob[:,1]
sns.distplot(y_pred_class,kde=False)
plt.show()
for i in range(y_pred_class.size):
print(y_pred_class[i],y_pred[i])
1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 0.06 0.0 0.04 0.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.96 1.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.94 1.0 1.0 1.0 0.88 1.0 0.06 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.94 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.92 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 0.22 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.08 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.92 1.0 1.0 1.0 0.0 0.0 0.08 0.0 0.0 0.0 0.86 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.08 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.96 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.14 0.0 0.02 0.0 0.14 0.0 0.98 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.08 0.0 0.16 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.06 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.08 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.02 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.94 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.08 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.06 0.0 0.02 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.1 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.98 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.76 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.08 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.06 0.0 0.0 0.0 1.0 1.0 0.92 1.0 0.92 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.06 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.2 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.1 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.98 1.0 0.02 0.0 0.02 0.0 0.06 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.94 1.0 0.9 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.96 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.16 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.9 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.02 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.24 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.94 1.0 1.0 1.0 0.0 0.0 0.94 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.18 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.92 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.02 0.0 0.02 0.0 1.0 1.0 0.96 1.0 0.04 0.0 1.0 1.0 0.0 0.0 0.92 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.92 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.08 0.0 1.0 1.0 0.92 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.06 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.06 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.08 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.94 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.9 1.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.06 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.94 1.0 0.06 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.18 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.06 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.08 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.94 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.98 1.0 0.94 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.04 0.0 0.08 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.88 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.98 1.0 0.94 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.88 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.88 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 0.96 1.0 0.0 0.0 0.08 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.98 1.0 0.96 1.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 0.0 0.0 0.0 0.0 0.1 0.0 0.88 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.08 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.02 0.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.9 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.9 1.0 0.94 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.14 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.9 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.92 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.9 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.1 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.08 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.94 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.94 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.92 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.92 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.88 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.92 1.0 0.02 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.1 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.9 1.0 1.0 1.0 0.96 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.98 1.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 0.0 0.0 0.14 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.92 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.04 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.12 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.04 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.98 1.0 0.04 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.14 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.9 1.0 0.02 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.88 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.04 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.94 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.08 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.9 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.02 0.0 0.98 1.0 1.0 1.0 0.92 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.96 1.0 0.02 0.0 1.0 1.0 0.02 0.0 0.98 1.0 0.0 0.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.94 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.94 1.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.92 1.0 1.0 1.0 0.0 0.0 0.86 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.02 0.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.82 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.94 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.88 1.0 1.0 1.0 1.0 1.0 0.18 0.0 0.06 0.0 0.08 0.0 0.08 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.08 0.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.02 0.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.96 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.82 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.08 0.0 0.0 0.0 0.0 0.0 0.9 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.18 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.06 0.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.08 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.06 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.92 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 0.94 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.04 0.0 0.0 0.0 0.9 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.12 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.08 0.0 0.0 0.0 1.0 1.0 0.94 1.0 0.98 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.96 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.08 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.06 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.92 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.1 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.94 1.0 0.06 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.2 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.9 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.04 0.0 0.98 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.9 1.0 0.22 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.1 0.0 0.0 0.0 0.04 0.0 1.0 1.0 0.88 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 0.94 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.84 1.0 0.04 0.0 1.0 1.0 0.02 0.0 0.08 0.0 0.86 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.12 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.06 0.0 0.98 1.0 0.0 0.0 0.06 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.08 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.94 1.0 1.0 1.0 0.0 0.0 0.92 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.94 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.9 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.92 1.0 1.0 1.0 0.0 0.0 0.92 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.18 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.9 1.0 1.0 1.0 0.0 0.0 0.08 0.0 0.0 0.0 1.0 1.0 0.92 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.1 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.92 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.02 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.96 1.0 0.92 1.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.94 1.0 0.02 0.0 1.0 1.0 0.94 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.84 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.04 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 0.9 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.12 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.92 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.06 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.9 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.06 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 0.04 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.02 0.0 0.02 0.0 1.0 1.0 0.1 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.92 1.0 0.0 0.0 1.0 1.0 0.96 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.86 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.94 1.0 1.0 1.0 1.0 1.0 0.18 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.04 0.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.9 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.94 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.9 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.06 0.0 0.88 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 0.04 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.94 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.96 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.08 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.02 0.0 0.0 0.0 0.98 1.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.92 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.94 1.0 0.04 0.0 1.0 1.0 1.0 1.0 0.92 1.0 0.12 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 0.12 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.92 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.2 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.94 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.96 1.0 0.02 0.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.2 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.84 1.0 1.0 1.0 0.88 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.14 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.04 0.0 0.02 0.0 0.92 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.14 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.98 1.0 1.0 1.0 0.06 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.9 1.0 0.92 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.18 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.88 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.2 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.92 1.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.06 0.0 0.94 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.18 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.9 1.0 1.0 1.0 1.0 1.0 0.8 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.9 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.06 0.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.06 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.1 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.04 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.06 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.04 0.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.9 1.0 0.0 0.0 0.02 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.08 0.0 0.94 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.92 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.96 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.08 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.96 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.9 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.06 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.98 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.94 1.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.1 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.96 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.9 1.0 1.0 1.0 0.0 0.0 0.98 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.84 1.0 0.1 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.04 0.0 1.0 1.0 0.0 0.0 0.12 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.06 0.0 0.0 0.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.04 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.94 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.96 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.92 1.0 1.0 1.0 0.98 1.0 0.06 0.0 1.0 1.0 0.06 0.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.02 0.0 0.04 0.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.06 0.0 0.94 1.0 0.0 0.0 1.0 1.0 0.14 0.0 1.0 1.0 0.94 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.94 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 0.06 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.9 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.92 1.0 0.0 0.0 0.96 1.0 0.02 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.1 0.0 0.0 0.0 0.0 0.0 0.04 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 0.98 1.0 0.04 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.02 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.94 1.0 0.98 1.0 0.98 1.0 0.0 0.0 0.96 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.92 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.96 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.02 0.0 0.0 0.0 0.94 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.98 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.1 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.96 1.0 1.0 1.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.04 0.0 1.0 1.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.22 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.96 1.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.96 1.0 0.96 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.06 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.96 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 0.36 0.0 1.0 1.0 1.0 1.0 0.02 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.02 0.0 1.0 1.0 0.0 0.0 0.9 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.98 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.9 1.0 1.0 1.0 0.0 0.0 0.04 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.98 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 0.98 1.0 0.0 0.0 0.92 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.06 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.94 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.94 1.0 0.0 0.0 0.94 1.0 0.0 0.0 1.0 1.0 0.08 0.0 0.0 0.0 0.04 0.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.08 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 0.96 1.0 0.0 0.0 1.0 1.0 0.18 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.04 0.0 0.96 1.0 0.0 0.0 0.02 0.0 1.0 1.0 0.04 0.0 1.0 1.0 1.0 1.0 0.1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.94 1.0 0.06 0.0 1.0 1.0 0.98 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.04 0.0 0.0 0.0 1.0 1.0 0.9 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.94 1.0 0.04 0.0 0.98 1.0 1.0 1.0 0.02 0.0 0.02 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.14 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 0.98 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.94 1.0 1.0 1.0 0.0 0.0 0.12 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.92 1.0 0.98 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.0 0.0 0.02 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.94 1.0 0.96 1.0 1.0 1.0 0.9 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 0.12 0.0 0.0 0.0 0.02 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.02 0.0 0.0 0.0 0.02 0.0 0.0 0.0 1.0 1.0 0.94 1.0 1.0 1.0 1.0 1.0 0.84 1.0 0.98 1.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.98 1.0 0.04 0.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.88 1.0 1.0 1.0 0.0 0.0 1.0 1.0 0.0 0.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.84 1.0 0.12 0.0 0.0 0.0 0.98 1.0 0.18 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.02 0.0 1.0 1.0
y_pred_adj=y_pred
for i in range (y_pred_class.size):
if y_pred_class[i]>0.3:
y_pred_adj[i]=1
else:
y_pred_adj[i]=0
confusion_matrix(y_test,y_pred_adj)
array([[2214, 0],
[ 93, 2686]])
classi=RandomForestClassifier(max_depth=3,random_state=42)
classi.fit(X_train,y_train)
y_train_pred=classi.predict(X_train)
y_train_prob=classi.predict_proba(X_train)[:,1]
print("accuracy on train : ",accuracy_score(y_train,y_train_pred))
print("auc on train : ",roc_auc_score(y_train,y_train_prob))
print("confusion matrix : ","\n", confusion_matrix(y_train,y_train_pred))
y_test_pred=classi.predict(X_test)
y_test_prob=classi.predict_proba(X_test)[:,1]
print("accuracy on test : ",accuracy_score(y_test,y_test_pred))
print("auc on test : ",roc_auc_score(y_test,y_test_prob))
print("confusion matrix : ","\n", confusion_matrix(y_test,y_test_pred))
accuracy on train : 1.0 auc on train : 1.0 confusion matrix : [[5279 0] [ 0 6371]] accuracy on test : 0.9811736431003405 auc on test : 0.9947563722368662 confusion matrix : [[2214 0] [ 94 2685]]
from sklearn.model_selection import RandomizedSearchCV
from scipy.stats import randint as sp
z=RandomForestClassifier(random_state=42)
params={"n_estimators":sp(50,200),"max_features":sp(1,10),"max_depth":sp(2,10),"min_samples_split":sp(2,20),
"min_samples_leaf":sp(1,20)}
rsearch=RandomizedSearchCV(z,param_distributions=params,n_iter=5,cv=3,scoring="roc_auc",random_state=42,
return_train_score=True,n_jobs=1)
rsearch.fit(X_train,y_train)
RandomizedSearchCV(cv=3, estimator=RandomForestClassifier(random_state=42),
n_iter=5, n_jobs=1,
param_distributions={'max_depth': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f0310>,
'max_features': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f09d0>,
'min_samples_leaf': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f0f10>,
'min_samples_split': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839be80fd0>,
'n_estimators': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f0b20>},
random_state=42, return_train_score=True, scoring='roc_auc')In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. RandomizedSearchCV(cv=3, estimator=RandomForestClassifier(random_state=42),
n_iter=5, n_jobs=1,
param_distributions={'max_depth': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f0310>,
'max_features': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f09d0>,
'min_samples_leaf': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f0f10>,
'min_samples_split': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839be80fd0>,
'n_estimators': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839b3f0b20>},
random_state=42, return_train_score=True, scoring='roc_auc')RandomForestClassifier(random_state=42)
RandomForestClassifier(random_state=42)
z=RandomForestClassifier(random_state=42)
params={"n_estimators":sp(50,200),"max_features":sp(1,10),"max_depth":sp(2,10),"min_samples_split":sp(2,20),
"min_samples_leaf":sp(1,20)}
rsearch=RandomizedSearchCV(z,param_distributions=params,n_iter=10,cv=3,scoring="roc_auc",random_state=42,
return_train_score=True,n_jobs=1)
rsearch.fit(X_train,y_train)
RandomizedSearchCV(cv=3, estimator=RandomForestClassifier(random_state=42),
n_jobs=1,
param_distributions={'max_depth': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839d63adc0>,
'max_features': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839d63a280>,
'min_samples_leaf': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839bedcca0>,
'min_samples_split': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839becd850>,
'n_estimators': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839becd070>},
random_state=42, return_train_score=True, scoring='roc_auc')In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. RandomizedSearchCV(cv=3, estimator=RandomForestClassifier(random_state=42),
n_jobs=1,
param_distributions={'max_depth': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839d63adc0>,
'max_features': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839d63a280>,
'min_samples_leaf': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839bedcca0>,
'min_samples_split': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839becd850>,
'n_estimators': <scipy.stats._distn_infrastructure.rv_frozen object at 0x7f839becd070>},
random_state=42, return_train_score=True, scoring='roc_auc')RandomForestClassifier(random_state=42)
RandomForestClassifier(random_state=42)
rsearch.best_params_
{'max_depth': 8,
'max_features': 4,
'min_samples_leaf': 15,
'min_samples_split': 12,
'n_estimators': 121}
rs=pd.DataFrame(rsearch.cv_results_)
rs.head(5)
| mean_fit_time | std_fit_time | mean_score_time | std_score_time | param_max_depth | param_max_features | param_min_samples_leaf | param_min_samples_split | param_n_estimators | params | ... | split1_test_score | split2_test_score | mean_test_score | std_test_score | rank_test_score | split0_train_score | split1_train_score | split2_train_score | mean_train_score | std_train_score | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.442421 | 0.022022 | 0.038334 | 0.002500 | 8 | 4 | 15 | 12 | 121 | {'max_depth': 8, 'max_features': 4, 'min_sampl... | ... | 1.000000 | 1.000000 | 1.000000 | 0.00000 | 1 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 0.000000 |
| 1 | 0.514672 | 0.007065 | 0.038077 | 0.001412 | 6 | 5 | 7 | 12 | 137 | {'max_depth': 6, 'max_features': 5, 'min_sampl... | ... | 1.000000 | 1.000000 | 1.000000 | 0.00000 | 1 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 0.000000 |
| 2 | 0.657688 | 0.016793 | 0.050880 | 0.002381 | 6 | 4 | 8 | 4 | 199 | {'max_depth': 6, 'max_features': 4, 'min_sampl... | ... | 1.000000 | 1.000000 | 1.000000 | 0.00000 | 1 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 0.000000 |
| 3 | 0.476691 | 0.026048 | 0.052222 | 0.003117 | 6 | 2 | 12 | 7 | 179 | {'max_depth': 6, 'max_features': 2, 'min_sampl... | ... | 0.999804 | 0.999912 | 0.999874 | 0.00005 | 10 | 0.999942 | 0.999934 | 0.999933 | 0.999936 | 0.000004 |
| 4 | 0.429142 | 0.013031 | 0.034635 | 0.000513 | 9 | 5 | 1 | 13 | 107 | {'max_depth': 9, 'max_features': 5, 'min_sampl... | ... | 1.000000 | 1.000000 | 1.000000 | 0.00000 | 1 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 0.000000 |
5 rows × 21 columns
rs.groupby("param_max_depth")["mean_test_score","mean_train_score"].agg("mean").plot()
<AxesSubplot:xlabel='param_max_depth'>
classi=RandomForestClassifier(**rsearch.best_params_)
classi.fit(X_train,y_train)
RandomForestClassifier(max_depth=8, max_features=4, min_samples_leaf=15,
min_samples_split=12, n_estimators=121)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. RandomForestClassifier(max_depth=8, max_features=4, min_samples_leaf=15,
min_samples_split=12, n_estimators=121)y_train_pred=classi.predict(X_train)
y_train_prob=classi.predict_proba(X_train)[:,1]
print("accuracy on train : ",accuracy_score(y_train,y_train_pred))
print("auc on train : ",roc_auc_score(y_train,y_train_prob))
print("confusion matrix : ","\n", confusion_matrix(y_train,y_train_pred))
y_test_pred=classi.predict(X_test)
y_test_prob=classi.predict_proba(X_test)[:,1]
print("accuracy on test : ",accuracy_score(y_test,y_test_pred))
print("auc on test : ",roc_auc_score(y_test,y_test_prob))
print("confusion matrix : ","\n", confusion_matrix(y_test,y_test_pred))
accuracy on train : 1.0 auc on train : 1.0 confusion matrix : [[5279 0] [ 0 6371]] accuracy on test : 0.9811736431003405 auc on test : 0.9972751826594674 confusion matrix : [[2214 0] [ 94 2685]]
BOOSTING
from sklearn.ensemble import AdaBoostClassifier
classi=AdaBoostClassifier()
classi.fit(X_train,y_train)
y_train_pred=classi.predict(X_train)
y_train_prob=classi.predict_proba(X_train)[:,1]
print("accuracy on train : ",accuracy_score(y_train,y_train_pred))
print("auc on train : ",roc_auc_score(y_train,y_train_prob))
print("confusion matrix : ","\n", confusion_matrix(y_train,y_train_pred))
y_test_pred=classi.predict(X_test)
y_test_prob=classi.predict_proba(X_test)[:,1]
print("accuracy on test : ",accuracy_score(y_test,y_test_pred))
print("auc on test : ",roc_auc_score(y_test,y_test_prob))
print("confusion matrix : ","\n", confusion_matrix(y_test,y_test_pred))
accuracy on train : 1.0 auc on train : 1.0 confusion matrix : [[5279 0] [ 0 6371]] accuracy on test : 0.9811736431003405 auc on test : 0.9830874415257287 confusion matrix : [[2214 0] [ 94 2685]]
MODELS COMPARISON
df.columns.get_loc("Release_Clause_Class")
28
array=df.values
X=array[:,[11,12,13,14,15,16,17,18,19,20]]
y=array[:,28]
models = []
models.append(('LR', LogisticRegression()))
models.append(('RF', RandomForestClassifier()))
models.append(('KNN', KNeighborsClassifier()))
models.append(('DT', DecisionTreeClassifier()))
from sklearn import model_selection
results = []
names = []
scoring = 'accuracy'
for name, model in models:
kfold = model_selection.KFold(n_splits=10, random_state=None)
cv_results = model_selection.cross_val_score(model, X, y, cv=kfold, scoring=scoring)
results.append(cv_results)
names.append(name)
msg = "%s: %f (%f)" % (name, cv_results.mean(), cv_results.std())
print(msg)
LR: 0.679536 (0.159494) RF: 0.775191 (0.160385) KNN: 0.745928 (0.156370) DT: 0.697926 (0.126725)
# boxplot algorithm comparison
fig = plt.figure()
fig.suptitle('Algorithm Comparison')
ax = fig.add_subplot(211)
plt.boxplot(results)
ax.set_xticklabels(names)
plt.show()